# Imports assumed for this test excerpt (module paths are per Poetry 1.2 and
# may differ in other versions):
import pytest

from packaging.tags import Tag
from poetry.config.config import Config
from poetry.core.packages.package import Package
from poetry.installation.chooser import Chooser
from poetry.repositories.pool import Pool
from poetry.utils.env import MockEnv


def test_chooser_throws_an_error_if_package_hashes_do_not_match(
    env,
    mock_pypi,
    mock_legacy,
    source_type,
    pool,
):
    chooser = Chooser(pool, env)
    package = Package("isort", "4.3.4")
    files = [
        {
            "hash": "sha256:0000000000000000000000000000000000000000000000000000000000000000",
            "filename": "isort-4.3.4.tar.gz",
        }
    ]
    if source_type == "legacy":
        package = Package(
            package.name,
            package.version.text,
            source_type="legacy",
            source_reference="foo",
            source_url="https://foo.bar/simple/",
        )

    package.files = files

    with pytest.raises(RuntimeError) as e:
        chooser.choose_for(package)

    assert files[0]["hash"] in str(e)

def test_chooser_chooses_distributions_that_match_the_package_hashes(
    env,
    mock_pypi,
    mock_legacy,
    source_type,
    pool,
):
    chooser = Chooser(pool, env)
    package = Package("isort", "4.3.4")
    files = [
        {
            "hash": "sha256:b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8",
            "filename": "isort-4.3.4.tar.gz",
        }
    ]
    if source_type == "legacy":
        package = Package(
            package.name,
            package.version.text,
            source_type="legacy",
            source_reference="foo",
            source_url="https://foo.bar/simple/",
        )

    package.files = files

    link = chooser.choose_for(package)

    assert "isort-4.3.4.tar.gz" == link.filename

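# The next test reads `policy` and `filename` from test parameters that are
# not defined anywhere in this excerpt, so a parametrize decorator along these
# lines is presumably applied to it. The policy/filename pairs below are
# illustrative assumptions, not taken from the original suite:
@pytest.mark.parametrize(
    ("policy", "filename"),
    [
        (":all:", "pytest-3.5.0.tar.gz"),
        (":none:", "pytest-3.5.0-py2.py3-none-any.whl"),
        ("pytest", "pytest-3.5.0.tar.gz"),
    ],
)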
def test_chooser_no_binary_policy(
    env: MockEnv,
    mock_pypi: None,
    mock_legacy: None,
    source_type: str,
    pool: Pool,
    policy: str,
    filename: str,
    config: Config,
):
    config.merge({"installer": {"no-binary": policy.split(",")}})
    chooser = Chooser(pool, env, config)

    package = Package("pytest", "3.5.0")
    if source_type == "legacy":
        package = Package(
            package.name,
            package.version.text,
            source_type="legacy",
            source_reference="foo",
            source_url="https://foo.bar/simple/",
        )

    link = chooser.choose_for(package)

    assert link.filename == filename

def test_chooser_chooses_specific_python_universal_wheel_link_if_available(
    env, mock_pypi, mock_legacy, source_type, pool
):
    chooser = Chooser(pool, env)

    package = Package("isort", "4.3.4")
    if source_type == "legacy":
        package = Package(
            package.name,
            package.version.text,
            source_type="legacy",
            source_reference="foo",
            source_url="https://foo.bar/simple/",
        )

    link = chooser.choose_for(package)

    assert "isort-4.3.4-py3-none-any.whl" == link.filename

def test_chooser_chooses_system_specific_wheel_link_if_available(
    mock_pypi, mock_legacy, source_type, pool
):
    env = MockEnv(
        supported_tags=[Tag("cp37", "cp37m", "win32"), Tag("py3", "none", "any")]
    )
    chooser = Chooser(pool, env)

    package = Package("pyyaml", "3.13.0")
    if source_type == "legacy":
        # Construct a fresh Package instead of mutating source_* attributes:
        # source_type is a read-only property in current poetry-core, and this
        # matches the pattern used by the other tests in this module.
        package = Package(
            package.name,
            package.version.text,
            source_type="legacy",
            source_reference="foo",
            source_url="https://foo.bar/simple/",
        )

    link = chooser.choose_for(package)

    assert "PyYAML-3.13-cp37-cp37m-win32.whl" == link.filename

def test_chooser_chooses_universal_wheel_link_if_available(
    env: MockEnv, mock_pypi: None, mock_legacy: None, source_type: str, pool: Pool
):
    chooser = Chooser(pool, env)

    package = Package("pytest", "3.5.0")
    if source_type == "legacy":
        package = Package(
            package.name,
            package.version.text,
            source_type="legacy",
            source_reference="foo",
            source_url="https://foo.bar/simple/",
        )

    link = chooser.choose_for(package)

    assert link.filename == "pytest-3.5.0-py2.py3-none-any.whl"

def test_chooser_chooses_sdist_if_no_compatible_wheel_link_is_available(
    env: MockEnv,
    mock_pypi: None,
    mock_legacy: None,
    source_type: str,
    pool: Pool,
):
    chooser = Chooser(pool, env)

    package = Package("pyyaml", "3.13.0")
    if source_type == "legacy":
        package = Package(
            package.name,
            package.version.text,
            source_type="legacy",
            source_reference="foo",
            source_url="https://foo.bar/simple/",
        )

    link = chooser.choose_for(package)

    assert link.filename == "PyYAML-3.13.tar.gz"

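# The tests above rely on fixtures defined elsewhere in the suite (`env`,
# `source_type`, `pool`, `mock_pypi`, `mock_legacy`). A minimal sketch of
# plausible definitions for two of them, assuming a parametrized source type
# and a pool backed by PyPI plus a mocked "legacy" index; the HTTP-mocking
# fixtures are omitted. The repository name and URL follow the tests above,
# but these bodies are illustrative assumptions, not the original conftest:
from poetry.repositories.legacy_repository import LegacyRepository
from poetry.repositories.pypi_repository import PyPiRepository


@pytest.fixture(params=["", "legacy"])
def source_type(request) -> str:
    # "" exercises the default (PyPI) path, "legacy" the legacy-index path
    return request.param


@pytest.fixture
def pool() -> Pool:
    pool = Pool()
    pool.add_repository(PyPiRepository(disable_cache=True))
    pool.add_repository(
        LegacyRepository("foo", "https://foo.bar/simple/", disable_cache=True)
    )
    return pool
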
# Imports needed by this excerpt (module paths are per Poetry 1.2 and may
# differ in other versions):
from __future__ import annotations

import csv
import itertools
import json
import os
import threading

from concurrent.futures import ThreadPoolExecutor
from concurrent.futures import wait
from pathlib import Path
from subprocess import CalledProcessError
from typing import TYPE_CHECKING
from typing import Any

from cleo.io.null_io import NullIO

from poetry.core.packages.file_dependency import FileDependency
from poetry.core.packages.utils.link import Link
from poetry.core.packages.utils.utils import url_to_path
from poetry.core.pyproject.toml import PyProjectTOML
from poetry.installation.chef import Chef
from poetry.installation.chooser import Chooser
from poetry.utils._compat import decode
from poetry.utils.authenticator import Authenticator
from poetry.utils.env import EnvCommandError
from poetry.utils.helpers import pluralize
from poetry.utils.helpers import remove_directory
from poetry.utils.pip import pip_install

if TYPE_CHECKING:
    from cleo.io.io import IO

    from poetry.config.config import Config
    from poetry.core.packages.package import Package
    from poetry.installation.operations.install import Install
    from poetry.installation.operations.operation import Operation
    from poetry.installation.operations.uninstall import Uninstall
    from poetry.installation.operations.update import Update
    from poetry.repositories.pool import Pool
    from poetry.utils.env import Env


class Executor:
    def __init__(
        self,
        env: Env,
        pool: Pool,
        config: Config,
        io: IO,
        parallel: bool | None = None,
    ) -> None:
        self._env = env
        self._io = io
        self._dry_run = False
        self._enabled = True
        self._verbose = False
        self._authenticator = Authenticator(config, self._io)
        self._chef = Chef(config, self._env)
        self._chooser = Chooser(pool, self._env, config)

        if parallel is None:
            parallel = config.get("installer.parallel", True)

        if parallel:
            self._max_workers = self._get_max_workers(
                desired_max_workers=config.get("installer.max-workers")
            )
        else:
            self._max_workers = 1

        self._executor = ThreadPoolExecutor(max_workers=self._max_workers)
        self._total_operations = 0
        self._executed_operations = 0
        self._executed = {"install": 0, "update": 0, "uninstall": 0}
        self._skipped = {"install": 0, "update": 0, "uninstall": 0}
        self._sections = {}
        self._lock = threading.Lock()
        self._shutdown = False
        self._hashes: dict[str, str] = {}

    @property
    def installations_count(self) -> int:
        return self._executed["install"]

    @property
    def updates_count(self) -> int:
        return self._executed["update"]

    @property
    def removals_count(self) -> int:
        return self._executed["uninstall"]

    def supports_fancy_output(self) -> bool:
        return self._io.output.is_decorated() and not self._dry_run

    def disable(self) -> Executor:
        self._enabled = False
        return self

    def dry_run(self, dry_run: bool = True) -> Executor:
        self._dry_run = dry_run
        return self

    def verbose(self, verbose: bool = True) -> Executor:
        self._verbose = verbose
        return self

    def pip_install(
        self, req: Path | Link, upgrade: bool = False, editable: bool = False
    ) -> int:
        try:
            pip_install(req, self._env, upgrade=upgrade, editable=editable)
        except EnvCommandError as e:
            output = decode(e.e.output)
            if (
                "KeyboardInterrupt" in output
                or "ERROR: Operation cancelled by user" in output
            ):
                return -2
            raise

        return 0

    def execute(self, operations: list[Operation]) -> int:
        self._total_operations = len(operations)
        for job_type in self._executed:
            self._executed[job_type] = 0
            self._skipped[job_type] = 0

        if operations and (self._enabled or self._dry_run):
            self._display_summary(operations)

        # We group operations by priority
        groups = itertools.groupby(operations, key=lambda o: -o.priority)
        self._sections = {}
        for _, group in groups:
            tasks = []
            serial_operations = []
            for operation in group:
                if self._shutdown:
                    break

                # Some operations are unsafe, we must execute them serially in a group
                # https://github.com/python-poetry/poetry/issues/3086
                # https://github.com/python-poetry/poetry/issues/2658
                #
                # We need to explicitly check source type here, see:
                # https://github.com/python-poetry/poetry-core/pull/98
                is_parallel_unsafe = operation.job_type == "uninstall" or (
                    operation.package.develop
                    and operation.package.source_type in {"directory", "git"}
                )
                if not operation.skipped and is_parallel_unsafe:
                    serial_operations.append(operation)
                    continue

                tasks.append(
                    self._executor.submit(self._execute_operation, operation)
                )

            try:
                wait(tasks)

                for operation in serial_operations:
                    wait([self._executor.submit(self._execute_operation, operation)])
            except KeyboardInterrupt:
                self._shutdown = True

            if self._shutdown:
                # Cancel further tasks from being executed
                for task in tasks:
                    task.cancel()
                self._executor.shutdown(wait=True)

                break

        return 1 if self._shutdown else 0

    @staticmethod
    def _get_max_workers(desired_max_workers: int | None = None) -> int:
        # This should be directly handled by ThreadPoolExecutor
        # however, on some systems the number of CPUs cannot be determined
        # (it raises a NotImplementedError), so, in this case, we assume
        # that the system only has one CPU.
        try:
            default_max_workers = os.cpu_count() + 4
        except NotImplementedError:
            default_max_workers = 5

        if desired_max_workers is None:
            return default_max_workers
        return min(default_max_workers, desired_max_workers)

    def _write(self, operation: Operation, line: str) -> None:
        if not self.supports_fancy_output() or not self._should_write_operation(
            operation
        ):
            return

        if self._io.is_debug():
            with self._lock:
                section = self._sections[id(operation)]
                section.write_line(line)
            return

        with self._lock:
            section = self._sections[id(operation)]
            section.clear()
            section.write(line)

    def _execute_operation(self, operation: Operation) -> None:
        try:
            op_message = self.get_operation_message(operation)
            if self.supports_fancy_output():
                if id(operation) not in self._sections and self._should_write_operation(
                    operation
                ):
                    with self._lock:
                        self._sections[id(operation)] = self._io.section()
                        self._sections[id(operation)].write_line(
                            f" <fg=blue;options=bold>•</> {op_message}:"
                            " <fg=blue>Pending...</>"
                        )
            else:
                if self._should_write_operation(operation):
                    if not operation.skipped:
                        self._io.write_line(
                            f" <fg=blue;options=bold>•</> {op_message}"
                        )
                    else:
                        self._io.write_line(
                            f" <fg=default;options=bold,dark>•</> {op_message}: "
                            "<fg=default;options=bold,dark>Skipped</> "
                            "<fg=default;options=dark>for the following reason:</> "
                            f"<fg=default;options=bold,dark>{operation.skip_reason}</>"
                        )

            try:
                result = self._do_execute_operation(operation)
            except EnvCommandError as e:
                if e.e.returncode == -2:
                    result = -2
                else:
                    raise

            # If we have a result of -2 it means a KeyboardInterrupt
            # in any Python subprocess, so we raise a KeyboardInterrupt
            # error to be picked up by the error handler.
            if result == -2:
                raise KeyboardInterrupt
        except Exception as e:
            try:
                from cleo.ui.exception_trace import ExceptionTrace

                if not self.supports_fancy_output():
                    io = self._io
                else:
                    message = (
                        " <error>•</error>"
                        f" {self.get_operation_message(operation, error=True)}:"
                        " <error>Failed</error>"
                    )
                    self._write(operation, message)
                    io = self._sections.get(id(operation), self._io)

                with self._lock:
                    trace = ExceptionTrace(e)
                    trace.render(io)
                    io.write_line("")
            finally:
                with self._lock:
                    self._shutdown = True
        except KeyboardInterrupt:
            try:
                message = (
                    " <warning>•</warning>"
                    f" {self.get_operation_message(operation, warning=True)}:"
                    " <warning>Cancelled</warning>"
                )
                if not self.supports_fancy_output():
                    self._io.write_line(message)
                else:
                    self._write(operation, message)
            finally:
                with self._lock:
                    self._shutdown = True

    def _do_execute_operation(self, operation: Operation) -> int:
        method = operation.job_type

        operation_message = self.get_operation_message(operation)
        if operation.skipped:
            if self.supports_fancy_output():
                self._write(
                    operation,
                    f" <fg=default;options=bold,dark>•</> {operation_message}: "
                    "<fg=default;options=bold,dark>Skipped</> "
                    "<fg=default;options=dark>for the following reason:</> "
                    f"<fg=default;options=bold,dark>{operation.skip_reason}</>",
                )

            self._skipped[operation.job_type] += 1

            return 0

        if not self._enabled or self._dry_run:
            self._io.write_line(f" <fg=blue;options=bold>•</> {operation_message}")

            return 0

        result = getattr(self, f"_execute_{method}")(operation)

        if result != 0:
            return result

        operation_message = self.get_operation_message(operation, done=True)
        message = f" <fg=green;options=bold>•</> {operation_message}"
        self._write(operation, message)

        self._increment_operations_count(operation, True)

        return result

    def _increment_operations_count(
        self, operation: Operation, executed: bool
    ) -> None:
        with self._lock:
            if executed:
                self._executed_operations += 1
                self._executed[operation.job_type] += 1
            else:
                self._skipped[operation.job_type] += 1

    def run_pip(self, *args: Any, **kwargs: Any) -> int:
        try:
            self._env.run_pip(*args, **kwargs)
        except EnvCommandError as e:
            output = decode(e.e.output)
            if (
                "KeyboardInterrupt" in output
                or "ERROR: Operation cancelled by user" in output
            ):
                return -2
            raise

        return 0

    def get_operation_message(
        self,
        operation: Operation,
        done: bool = False,
        error: bool = False,
        warning: bool = False,
    ) -> str:
        base_tag = "fg=default"
        operation_color = "c2"
        source_operation_color = "c2"
        package_color = "c1"

        if error:
            operation_color = "error"
        elif warning:
            operation_color = "warning"
        elif done:
            operation_color = "success"

        if operation.skipped:
            base_tag = "fg=default;options=dark"
            operation_color += "_dark"
            source_operation_color += "_dark"
            package_color += "_dark"

        if operation.job_type == "install":
            return (
                f"<{base_tag}>Installing"
                f" <{package_color}>{operation.package.name}</{package_color}>"
                f" (<{operation_color}>{operation.package.full_pretty_version}</>)</>"
            )

        if operation.job_type == "uninstall":
            return (
                f"<{base_tag}>Removing"
                f" <{package_color}>{operation.package.name}</{package_color}>"
                f" (<{operation_color}>{operation.package.full_pretty_version}</>)</>"
            )

        if operation.job_type == "update":
            return (
                f"<{base_tag}>Updating"
                f" <{package_color}>{operation.initial_package.name}</{package_color}> "
                f"(<{source_operation_color}>"
                f"{operation.initial_package.full_pretty_version}"
                f"</{source_operation_color}> -> <{operation_color}>"
                f"{operation.target_package.full_pretty_version}</>)</>"
            )

        return ""

    def _display_summary(self, operations: list[Operation]) -> None:
        installs = 0
        updates = 0
        uninstalls = 0
        skipped = 0
        for op in operations:
            if op.skipped:
                skipped += 1
                continue

            if op.job_type == "install":
                installs += 1
            elif op.job_type == "update":
                updates += 1
            elif op.job_type == "uninstall":
                uninstalls += 1

        if not installs and not updates and not uninstalls and not self._verbose:
            self._io.write_line("")
            self._io.write_line("No dependencies to install or update")

            return

        self._io.write_line("")
        self._io.write("<b>Package operations</b>: ")
        self._io.write(f"<info>{installs}</> install{pluralize(installs)}, ")
        self._io.write(f"<info>{updates}</> update{pluralize(updates)}, ")
        self._io.write(f"<info>{uninstalls}</> removal{pluralize(uninstalls)}")

        if skipped and self._verbose:
            self._io.write(f", <info>{skipped}</> skipped")

        self._io.write_line("")
        self._io.write_line("")

    def _execute_install(self, operation: Install | Update) -> int:
        status_code = self._install(operation)

        self._save_url_reference(operation)

        return status_code

    def _execute_update(self, operation: Install | Update) -> int:
        status_code = self._update(operation)

        self._save_url_reference(operation)

        return status_code

    def _execute_uninstall(self, operation: Uninstall) -> int:
        op_msg = self.get_operation_message(operation)
        message = f" <fg=blue;options=bold>•</> {op_msg}: <info>Removing...</info>"
        self._write(operation, message)

        return self._remove(operation)

    def _install(self, operation: Install | Update) -> int:
        package = operation.package
        if package.source_type == "directory":
            return self._install_directory(operation)

        if package.source_type == "git":
            return self._install_git(operation)

        if package.source_type == "file":
            archive = self._prepare_file(operation)
        elif package.source_type == "url":
            archive = self._download_link(operation, Link(package.source_url))
        else:
            archive = self._download(operation)

        operation_message = self.get_operation_message(operation)
        message = (
            f" <fg=blue;options=bold>•</> {operation_message}:"
            " <info>Installing...</info>"
        )
        self._write(operation, message)

        return self.pip_install(archive, upgrade=operation.job_type == "update")

    def _update(self, operation: Install | Update) -> int:
        return self._install(operation)

    def _remove(self, operation: Uninstall) -> int:
        package = operation.package

        # If we have a VCS package, remove its source directory
        if package.source_type == "git":
            src_dir = self._env.path / "src" / package.name
            if src_dir.exists():
                remove_directory(src_dir, force=True)

        try:
            return self.run_pip("uninstall", package.name, "-y")
        except CalledProcessError as e:
            if "not installed" in str(e):
                return 0

            raise

    def _prepare_file(self, operation: Install | Update) -> Path:
        package = operation.package

        operation_message = self.get_operation_message(operation)
        message = (
            f" <fg=blue;options=bold>•</> {operation_message}:"
            " <info>Preparing...</info>"
        )
        self._write(operation, message)

        archive = Path(package.source_url)
        if not Path(package.source_url).is_absolute() and package.root_dir:
            archive = package.root_dir / archive

        archive = self._chef.prepare(archive)

        return archive

    def _install_directory(self, operation: Install | Update) -> int:
        from poetry.factory import Factory

        package = operation.package
        operation_message = self.get_operation_message(operation)

        message = (
            f" <fg=blue;options=bold>•</> {operation_message}:"
            " <info>Building...</info>"
        )
        self._write(operation, message)

        if package.root_dir:
            req = package.root_dir / package.source_url
        else:
            req = Path(package.source_url).resolve(strict=False)

        pyproject = PyProjectTOML(os.path.join(req, "pyproject.toml"))

        if pyproject.is_poetry_project():
            # Even if there is a build system specified
            # some versions of pip (< 19.0.0) don't understand it
            # so we need to check the version of pip to know
            # if we can rely on the build system
            legacy_pip = (
                self._env.pip_version
                < self._env.pip_version.__class__.from_parts(19, 0, 0)
            )
            package_poetry = Factory().create_poetry(pyproject.file.path.parent)

            if package.develop and not package_poetry.package.build_script:
                from poetry.masonry.builders.editable import EditableBuilder

                # This is a Poetry package in editable mode
                # we can use the EditableBuilder without going through pip
                # to install it, unless it has a build script.
                builder = EditableBuilder(package_poetry, self._env, NullIO())
                builder.build()

                return 0
            elif legacy_pip or package_poetry.package.build_script:
                from poetry.core.masonry.builders.sdist import SdistBuilder

                # We need to rely on creating a temporary setup.py
                # file since the version of pip does not support
                # build-systems
                # We also need it for non-PEP-517 packages
                builder = SdistBuilder(package_poetry)

                with builder.setup_py():
                    if package.develop:
                        return self.pip_install(req, upgrade=True, editable=True)
                    return self.pip_install(req, upgrade=True)

        if package.develop:
            return self.pip_install(req, upgrade=True, editable=True)

        return self.pip_install(req, upgrade=True)

    def _install_git(self, operation: Install | Update) -> int:
        from poetry.vcs.git import Git

        package = operation.package
        operation_message = self.get_operation_message(operation)

        message = (
            f" <fg=blue;options=bold>•</> {operation_message}: <info>Cloning...</info>"
        )
        self._write(operation, message)

        source = Git.clone(
            url=package.source_url,
            source_root=self._env.path / "src",
            revision=package.source_resolved_reference or package.source_reference,
        )

        # Now we just need to install from the source directory
        original_url = package.source_url
        package._source_url = str(source.path)

        status_code = self._install_directory(operation)

        package._source_url = original_url

        return status_code

    def _download(self, operation: Install | Update) -> Link | Path:
        link = self._chooser.choose_for(operation.package)

        return self._download_link(operation, link)

    def _download_link(self, operation: Install | Update, link: Link) -> Link | Path:
        package = operation.package

        archive = self._chef.get_cached_archive_for_link(link)
        if archive is link:
            # No cached distribution was found, so we download and prepare it
            try:
                archive = self._download_archive(operation, link)
            except BaseException:
                cache_directory = self._chef.get_cache_directory_for_link(link)
                cached_file = cache_directory.joinpath(link.filename)
                # We can't use unlink(missing_ok=True) because it's not available
                # prior to Python 3.8
                if cached_file.exists():
                    cached_file.unlink()

                raise

        # TODO: Check readability of the created archive

        if not link.is_wheel:
            archive = self._chef.prepare(archive)

        if package.files:
            archive_hash = self._validate_archive_hash(archive, package)

            self._hashes[package.name] = archive_hash

        return archive

    @staticmethod
    def _validate_archive_hash(archive: Path | Link, package: Package) -> str:
        archive_path = (
            url_to_path(archive.url) if isinstance(archive, Link) else archive
        )
        file_dep = FileDependency(
            package.name,
            archive_path,
        )
        archive_hash = "sha256:" + file_dep.hash()
        known_hashes = {f["hash"] for f in package.files}

        if archive_hash not in known_hashes:
            raise RuntimeError(
                f"Hash for {package} from archive {archive_path.name} not found in"
                f" known hashes (was: {archive_hash})"
            )

        return archive_hash

    def _download_archive(self, operation: Install | Update, link: Link) -> Path:
        response = self._authenticator.request(
            "get",
            link.url,
            stream=True,
            io=self._sections.get(id(operation), self._io),
        )
        wheel_size = response.headers.get("content-length")
        operation_message = self.get_operation_message(operation)
        message = (
            f" <fg=blue;options=bold>•</> {operation_message}: <info>Downloading...</>"
        )
        progress = None
        if self.supports_fancy_output():
            if wheel_size is None:
                self._write(operation, message)
            else:
                from cleo.ui.progress_bar import ProgressBar

                progress = ProgressBar(
                    self._sections[id(operation)], max=int(wheel_size)
                )
                progress.set_format(message + " <b>%percent%%</b>")

        if progress:
            with self._lock:
                self._sections[id(operation)].clear()
                progress.start()

        done = 0
        archive = self._chef.get_cache_directory_for_link(link) / link.filename
        archive.parent.mkdir(parents=True, exist_ok=True)
        with archive.open("wb") as f:
            for chunk in response.iter_content(chunk_size=4096):
                if not chunk:
                    break

                done += len(chunk)

                if progress:
                    with self._lock:
                        progress.set_progress(done)

                f.write(chunk)

        if progress:
            with self._lock:
                progress.finish()

        return archive

    def _should_write_operation(self, operation: Operation) -> bool:
        return not operation.skipped or self._dry_run or self._verbose

    def _save_url_reference(self, operation: Operation) -> None:
        """
        Create and store a PEP-610 `direct_url.json` file, if needed.
        """
        if operation.job_type not in {"install", "update"}:
            return

        package = operation.package

        if not package.source_url or package.source_type == "legacy":
            # Since we are installing from our own distribution cache
            # pip will write a `direct_url.json` file pointing to the cache
            # distribution.
            # That's not what we want, so we remove the direct_url.json file,
            # if it exists.
            for direct_url_json in self._env.site_packages.find_distribution_direct_url_json_files(
                distribution_name=package.name, writable_only=True
            ):
                # We can't use unlink(missing_ok=True) because it's not always available
                if direct_url_json.exists():
                    direct_url_json.unlink()
            return

        url_reference = None

        if package.source_type == "git":
            url_reference = self._create_git_url_reference(package)
        elif package.source_type == "url":
            url_reference = self._create_url_url_reference(package)
        elif package.source_type == "directory":
            url_reference = self._create_directory_url_reference(package)
        elif package.source_type == "file":
            url_reference = self._create_file_url_reference(package)

        if url_reference:
            for dist in self._env.site_packages.distributions(
                name=package.name, writable_only=True
            ):
                dist._path.joinpath("direct_url.json").write_text(
                    json.dumps(url_reference),
                    encoding="utf-8",
                )

                record = dist._path.joinpath("RECORD")
                if record.exists():
                    with record.open(mode="a", encoding="utf-8") as f:
                        writer = csv.writer(f)
                        writer.writerow(
                            [
                                str(
                                    dist._path.joinpath("direct_url.json").relative_to(
                                        record.parent.parent
                                    )
                                ),
                                "",
                                "",
                            ]
                        )

    def _create_git_url_reference(
        self, package: Package
    ) -> dict[str, str | dict[str, str]]:
        reference = {
            "url": package.source_url,
            "vcs_info": {
                "vcs": "git",
                "requested_revision": package.source_reference,
                "commit_id": package.source_resolved_reference,
            },
        }

        return reference

    def _create_url_url_reference(
        self, package: Package
    ) -> dict[str, str | dict[str, str]]:
        archive_info = {}

        if package.name in self._hashes:
            archive_info["hash"] = self._hashes[package.name]

        reference = {"url": package.source_url, "archive_info": archive_info}

        return reference

    def _create_file_url_reference(
        self, package: Package
    ) -> dict[str, str | dict[str, str]]:
        archive_info = {}

        if package.name in self._hashes:
            archive_info["hash"] = self._hashes[package.name]

        reference = {
            "url": Path(package.source_url).as_uri(),
            "archive_info": archive_info,
        }

        return reference

    def _create_directory_url_reference(
        self, package: Package
    ) -> dict[str, str | dict[str, str]]:
        dir_info = {}

        if package.develop:
            dir_info["editable"] = True

        reference = {
            "url": Path(package.source_url).as_uri(),
            "dir_info": dir_info,
        }

        return reference

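# A minimal usage sketch, not part of executor.py: Poetry's Installer normally
# constructs and drives the Executor, but it can be exercised directly with a
# list of operations. `env`, `pool`, `config`, and `io` are assumed to be
# built by the caller; Install is the real operation class from
# poetry.installation.operations.
from poetry.installation.operations.install import Install


def install_packages(env, pool, config, io, packages) -> int:
    # One Install operation per resolved package. execute() returns 0 on
    # success and 1 if execution was shut down by a failure or Ctrl-C.
    executor = Executor(env, pool, config, io, parallel=False)
    return executor.execute([Install(package) for package in packages])
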
# Stdlib imports assumed for this excerpt; the Poetry classes and the
# src_parser/get_dependency/get_package/conda_name_to_pypi_name/PlatformEnv
# helpers come from conda-lock and its vendored Poetry modules (not shown):
from typing import Dict, List, Optional
from urllib.parse import urldefrag


def solve_pypi(
    pip_specs: Dict[str, src_parser.Dependency],
    use_latest: List[str],
    pip_locked: Dict[str, src_parser.LockedDependency],
    conda_locked: Dict[str, src_parser.LockedDependency],
    python_version: str,
    platform: str,
    verbose: bool = False,
) -> Dict[str, src_parser.LockedDependency]:
    """
    Solve pip dependencies for the given platform

    Parameters
    ----------
    pip_specs : PEP440 package specifications
    use_latest : Names of packages to update to the latest version compatible with pip_specs
    pip_locked : Previous solution for the given platform (pip packages only)
    conda_locked : Current solution of conda-only specs for the given platform
    python_version : Version of Python in conda_locked
    platform : Target platform
    verbose : Print chatter from solver

    """
    dummy_package = ProjectPackage("_dummy_package_", "0.0.0")
    dependencies = [get_dependency(spec) for spec in pip_specs.values()]
    for dep in dependencies:
        dummy_package.add_dependency(dep)

    pypi = PyPiRepository()
    pool = Pool(repositories=[pypi])

    installed = Repository()
    locked = Repository()

    python_packages = dict()
    for dep in conda_locked.values():
        if dep.name.startswith("__"):
            continue
        try:
            pypi_name = conda_name_to_pypi_name(dep.name).lower()
        except KeyError:
            continue
        # Prefer the Python package when its name collides with the Conda package
        # for the underlying library, e.g. python-xxhash (pypi: xxhash) over xxhash
        # (pypi: no equivalent)
        if pypi_name not in python_packages or pypi_name != dep.name:
            python_packages[pypi_name] = dep.version

    # treat conda packages as both locked and installed
    for name, version in python_packages.items():
        for repo in (locked, installed):
            repo.add_package(Package(name=name, version=version))

    # treat pip packages as locked only
    for spec in pip_locked.values():
        locked.add_package(get_package(spec))

    if verbose:
        io = ConsoleIO()
        io.set_verbosity(VERY_VERBOSE)
    else:
        io = NullIO()
    s = Solver(
        dummy_package,
        pool=pool,
        installed=installed,
        locked=locked,
        io=io,
    )
    to_update = list(
        {spec.name for spec in pip_locked.values()}.intersection(use_latest)
    )
    env = PlatformEnv(python_version, platform)
    # find platform-specific solution (e.g. dependencies conditioned on markers)
    with s.use_environment(env):
        result = s.solve(use_latest=to_update)

    chooser = Chooser(pool, env=env)

    # Extract distributions from the Poetry package plan, ignoring uninstalls
    # (usually: conda package with no pypi equivalent) and skipped ops
    # (already installed)
    requirements: List[src_parser.LockedDependency] = []
    for op in result:
        if not isinstance(op, Uninstall) and not op.skipped:
            # Take direct references verbatim
            source: Optional[src_parser.DependencySource] = None
            if op.package.source_type == "url":
                url, fragment = urldefrag(op.package.source_url)
                hash_type, hash = fragment.split("=")
                hash = src_parser.HashModel(**{hash_type: hash})
                source = src_parser.DependencySource(
                    type="url", url=op.package.source_url
                )
            # Choose the most specific distribution for the target
            else:
                link = chooser.choose_for(op.package)
                url = link.url_without_fragment
                hash = src_parser.HashModel(**{link.hash_name: link.hash})

            requirements.append(
                src_parser.LockedDependency(
                    name=op.package.name,
                    version=str(op.package.version),
                    manager="pip",
                    source=source,
                    platform=platform,
                    dependencies={
                        dep.name: str(dep.constraint) for dep in op.package.requires
                    },
                    url=url,
                    hash=hash,
                )
            )

    # use PyPI names of conda packages to walk the dependency tree and propagate
    # categories from explicit to transitive dependencies
    planned = {
        **{dep.name: dep for dep in requirements},
        # prefer conda packages, so add them afterwards
    }

    for conda_name, dep in conda_locked.items():
        try:
            pypi_name = conda_name_to_pypi_name(conda_name).lower()
        except KeyError:
            # no conda-to-pypi name mapping found; assume the conda package has
            # no pip equivalent
            continue
        planned[pypi_name] = dep

    src_parser._apply_categories(requested=pip_specs, planned=planned)

    return {dep.name: dep for dep in requirements}
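
# A minimal usage sketch, commented out because the spec/lock inputs come from
# conda-lock's parsers; the literal values below are illustrative assumptions:
#
#   pip_requirements = solve_pypi(
#       pip_specs=pip_specs,          # e.g. {"requests": src_parser.Dependency(...)}
#       use_latest=["requests"],      # re-solve these to their latest versions
#       pip_locked={},                # no previous pip solution to keep
#       conda_locked=conda_packages,  # result of the conda solve
#       python_version="3.9.7",       # python pinned by the conda solve
#       platform="linux-64",
#       verbose=False,
#   )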