Example #1
def test_get_cached_archive_for_link(config, mocker):
    chef = Chef(
        config,
        MockEnv(
            version_info=(3, 8, 3),
            marker_env={
                "interpreter_name": "cpython",
                "interpreter_version": "3.8.3"
            },
            supported_tags=[
                Tag("cp38", "cp38", "macosx_10_15_x86_64"),
                Tag("py3", "none", "any"),
            ],
        ),
    )

    cwd = Path.cwd() / ".pypoetrycache"

    mocker.patch.object(
        chef,
        "get_cached_archives_for_link",
        return_value=[
            Link(f"file:///{cwd}demo-0.1.0-py2.py3-none-any"),
            Link(f"file:///{cwd}demo-0.1.0.tar.gz"),
            Link(f"file:///{cwd}demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl"),
            Link(f"file:///{cwd}demo-0.1.0-cp37-cp37-macosx_10_15_x86_64.whl"),
        ],
    )

    archive = chef.get_cached_archive_for_link(
        Link("https://files.python-poetry.org/demo-0.1.0.tar.gz"))

    assert Link(f"file:///{cwd}demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl"
                ) == archive
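
Only the cp38 wheel above matches the mock environment's supported tags, which is why it wins over the sdist and the cp37 wheel. Below is a minimal, hypothetical sketch of that kind of tag-based selection (pick_cached_wheel is not Poetry's API; it only approximates what Chef.get_cached_archive_for_link does), using packaging.utils.parse_wheel_filename:

from __future__ import annotations

from packaging.tags import Tag
from packaging.utils import parse_wheel_filename


def pick_cached_wheel(filenames: list[str], supported_tags: set[Tag]) -> str | None:
    # Return the first cached wheel whose tag set intersects the supported tags.
    for name in filenames:
        if not name.endswith(".whl"):
            continue
        *_, tags = parse_wheel_filename(name)
        if tags & supported_tags:
            return name
    return None


print(
    pick_cached_wheel(
        [
            "demo-0.1.0.tar.gz",
            "demo-0.1.0-cp37-cp37-macosx_10_15_x86_64.whl",
            "demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
        ],
        {Tag("cp38", "cp38", "macosx_10_15_x86_64"), Tag("py3", "none", "any")},
    )
)  # -> demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl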
Example #2
def test_get_cached_archives_for_link(config, mocker):
    chef = Chef(
        config,
        MockEnv(marker_env={
            "interpreter_name": "cpython",
            "interpreter_version": "3.8.3"
        }),
    )

    distributions = Path(__file__).parent.parent.joinpath(
        "fixtures/distributions")
    mocker.patch.object(
        chef,
        "get_cache_directory_for_link",
        return_value=distributions,
    )

    archives = chef.get_cached_archives_for_link(
        Link("https://files.python-poetry.org/demo-0.1.0.tar.gz"))

    assert archives
    assert set(archives) == {
        Link(path.as_uri())
        for path in distributions.glob("demo-0.1.0*")
    }
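
The assertion mirrors a simple glob over the fixture directory. As a short sketch (assumed behaviour, not Poetry's actual implementation), collecting the cached artifacts for a release amounts to:

from pathlib import Path


def cached_archives(cache_dir: Path, name: str, version: str) -> list:
    # Matches both wheels and sdists cached for the given release,
    # e.g. cached_archives(Path("tests/fixtures/distributions"), "demo", "0.1.0").
    return sorted(cache_dir.glob(f"{name}-{version}*"))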
Example #3
    def __init__(
        self,
        env: Env,
        pool: Pool,
        config: Config,
        io: IO,
        parallel: bool | None = None,
    ) -> None:
        self._env = env
        self._io = io
        self._dry_run = False
        self._enabled = True
        self._verbose = False
        self._authenticator = Authenticator(config, self._io)
        self._chef = Chef(config, self._env)
        self._chooser = Chooser(pool, self._env, config)

        if parallel is None:
            parallel = config.get("installer.parallel", True)

        if parallel:
            self._max_workers = self._get_max_workers(
                desired_max_workers=config.get("installer.max-workers"))
        else:
            self._max_workers = 1

        self._executor = ThreadPoolExecutor(max_workers=self._max_workers)
        self._total_operations = 0
        self._executed_operations = 0
        self._executed = {"install": 0, "update": 0, "uninstall": 0}
        self._skipped = {"install": 0, "update": 0, "uninstall": 0}
        self._sections = {}
        self._lock = threading.Lock()
        self._shutdown = False
        self._hashes: dict[str, str] = {}
Example #4
def test_get_cached_archive_for_link(config: Config, mocker: MockerFixture,
                                     link: str, cached: str):
    chef = Chef(
        config,
        MockEnv(
            version_info=(3, 8, 3),
            marker_env={
                "interpreter_name": "cpython",
                "interpreter_version": "3.8.3"
            },
            supported_tags=[
                Tag("cp38", "cp38", "macosx_10_15_x86_64"),
                Tag("py3", "none", "any"),
            ],
        ),
    )

    mocker.patch.object(
        chef,
        "get_cached_archives_for_link",
        return_value=[
            Path("/cache/demo-0.1.0-py2.py3-none-any"),
            Path("/cache/demo-0.1.0.tar.gz"),
            Path("/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl"),
            Path("/cache/demo-0.1.0-cp37-cp37-macosx_10_15_x86_64.whl"),
        ],
    )

    archive = chef.get_cached_archive_for_link(Link(link))

    assert Path(cached) == archive
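
The link and cached arguments imply the test is parametrized. A plausible decorator is sketched below; the concrete cases are assumptions for illustration, not copied from Poetry's test suite:

import pytest


@pytest.mark.parametrize(
    ("link", "cached"),
    [
        (
            "https://files.python-poetry.org/demo-0.1.0.tar.gz",
            "/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
        ),
        (
            "https://files.python-poetry.org/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
            "/cache/demo-0.1.0-cp38-cp38-macosx_10_15_x86_64.whl",
        ),
    ],
)
def test_get_cached_archive_for_link(config, mocker, link: str, cached: str):
    ...  # body as shown above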
Example #5
def test_get_cache_directory_for_link(config):
    chef = Chef(
        config,
        MockEnv(
            marker_env={"interpreter_name": "cpython", "interpreter_version": "3.8.3"}
        ),
    )

    directory = chef.get_cache_directory_for_link(
        Link("https://files.python-poetry.org/poetry-1.1.0.tar.gz")
    )
    expected = Path(
        "/foo/artifacts/ba/63/13/283a3b3b7f95f05e9e6f84182d276f7bb0951d5b0cc24422b33f7a4648"
    )

    assert expected == directory
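
The expected value shows a pip-style fan-out (ba/63/13/&lt;rest&gt;) under the artifacts directory, i.e. the cache directory is derived from a hash of the link. The sketch below illustrates the idea; the exact key contents and encoding are assumptions, so it will not reproduce the digest asserted above:

import hashlib
import json
from pathlib import Path


def cache_directory_for(url: str, base: Path, interpreter: str, version: str) -> Path:
    # Hash a canonical JSON encoding of the key parts (assumed fields).
    key_parts = {
        "url": url,
        "interpreter_name": interpreter,
        "interpreter_version": version,
    }
    key = hashlib.sha256(
        json.dumps(key_parts, sort_keys=True, separators=(",", ":")).encode("ascii")
    ).hexdigest()
    # Fan the digest out into ab/cd/ef/<rest> directories, as in the test above.
    return base.joinpath(key[:2], key[2:4], key[4:6], key[6:])


print(cache_directory_for(
    "https://files.python-poetry.org/poetry-1.1.0.tar.gz",
    Path("/foo/artifacts"),
    "cpython",
    "3.8.3",
))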
Example #6
class Executor:
    def __init__(
        self,
        env: Env,
        pool: Pool,
        config: Config,
        io: IO,
        parallel: bool | None = None,
    ) -> None:
        self._env = env
        self._io = io
        self._dry_run = False
        self._enabled = True
        self._verbose = False
        self._authenticator = Authenticator(config, self._io)
        self._chef = Chef(config, self._env)
        self._chooser = Chooser(pool, self._env, config)

        if parallel is None:
            parallel = config.get("installer.parallel", True)

        if parallel:
            self._max_workers = self._get_max_workers(
                desired_max_workers=config.get("installer.max-workers"))
        else:
            self._max_workers = 1

        self._executor = ThreadPoolExecutor(max_workers=self._max_workers)
        self._total_operations = 0
        self._executed_operations = 0
        self._executed = {"install": 0, "update": 0, "uninstall": 0}
        self._skipped = {"install": 0, "update": 0, "uninstall": 0}
        self._sections = {}
        self._lock = threading.Lock()
        self._shutdown = False
        self._hashes: dict[str, str] = {}

    @property
    def installations_count(self) -> int:
        return self._executed["install"]

    @property
    def updates_count(self) -> int:
        return self._executed["update"]

    @property
    def removals_count(self) -> int:
        return self._executed["uninstall"]

    def supports_fancy_output(self) -> bool:
        return self._io.output.is_decorated() and not self._dry_run

    def disable(self) -> Executor:
        self._enabled = False

        return self

    def dry_run(self, dry_run: bool = True) -> Executor:
        self._dry_run = dry_run

        return self

    def verbose(self, verbose: bool = True) -> Executor:
        self._verbose = verbose

        return self

    def pip_install(self,
                    req: Path | Link,
                    upgrade: bool = False,
                    editable: bool = False) -> int:
        try:
            pip_install(req, self._env, upgrade=upgrade, editable=editable)
        except EnvCommandError as e:
            output = decode(e.e.output)
            if ("KeyboardInterrupt" in output
                    or "ERROR: Operation cancelled by user" in output):
                return -2
            raise

        return 0

    def execute(self, operations: list[Operation]) -> int:
        self._total_operations = len(operations)
        for job_type in self._executed:
            self._executed[job_type] = 0
            self._skipped[job_type] = 0

        if operations and (self._enabled or self._dry_run):
            self._display_summary(operations)

        # We group operations by priority
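        # Note: itertools.groupby only groups adjacent items, so the operations
        # passed in here are expected to already be ordered by priority.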
        groups = itertools.groupby(operations, key=lambda o: -o.priority)
        self._sections = {}
        for _, group in groups:
            tasks = []
            serial_operations = []
            for operation in group:
                if self._shutdown:
                    break

                # Some operations are unsafe, so we must execute them serially in a group
                # https://github.com/python-poetry/poetry/issues/3086
                # https://github.com/python-poetry/poetry/issues/2658
                #
                # We need to explicitly check source type here, see:
                # https://github.com/python-poetry/poetry-core/pull/98
                is_parallel_unsafe = operation.job_type == "uninstall" or (
                    operation.package.develop
                    and operation.package.source_type in {"directory", "git"})
                if not operation.skipped and is_parallel_unsafe:
                    serial_operations.append(operation)
                    continue

                tasks.append(
                    self._executor.submit(self._execute_operation, operation))

            try:
                wait(tasks)

                for operation in serial_operations:
                    wait([
                        self._executor.submit(self._execute_operation,
                                              operation)
                    ])

            except KeyboardInterrupt:
                self._shutdown = True

            if self._shutdown:
                # Cancel any remaining tasks so they are not executed
                for task in tasks:
                    task.cancel()
                self._executor.shutdown(wait=True)

                break

        return 1 if self._shutdown else 0

    @staticmethod
    def _get_max_workers(desired_max_workers: int | None = None) -> int:
        # This should be handled directly by ThreadPoolExecutor; however, on
        # some systems the number of CPUs cannot be determined (os.cpu_count()
        # returns None), so in that case we fall back to a small fixed pool.
        try:
            default_max_workers = (os.cpu_count() or 1) + 4
        except NotImplementedError:
            default_max_workers = 5

        if desired_max_workers is None:
            return default_max_workers
        return min(default_max_workers, desired_max_workers)
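        # Illustration (assumed numbers): with 8 CPUs and installer.max-workers
        # set to 4, this method returns min(8 + 4, 4) == 4.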

    def _write(self, operation: Operation, line: str) -> None:
        if not self.supports_fancy_output() or not self._should_write_operation(
            operation
        ):
            return

        if self._io.is_debug():
            with self._lock:
                section = self._sections[id(operation)]
                section.write_line(line)

            return

        with self._lock:
            section = self._sections[id(operation)]
            section.clear()
            section.write(line)

    def _execute_operation(self, operation: Operation) -> None:
        try:
            op_message = self.get_operation_message(operation)
            if self.supports_fancy_output():
                needs_section = id(operation) not in self._sections
                if needs_section and self._should_write_operation(operation):
                    with self._lock:
                        self._sections[id(operation)] = self._io.section()
                        self._sections[id(operation)].write_line(
                            f"  <fg=blue;options=bold>•</> {op_message}:"
                            " <fg=blue>Pending...</>")
            else:
                if self._should_write_operation(operation):
                    if not operation.skipped:
                        self._io.write_line(
                            f"  <fg=blue;options=bold>•</> {op_message}")
                    else:
                        self._io.write_line(
                            f"  <fg=default;options=bold,dark>•</> {op_message}: "
                            "<fg=default;options=bold,dark>Skipped</> "
                            "<fg=default;options=dark>for the following reason:</> "
                            f"<fg=default;options=bold,dark>{operation.skip_reason}</>"
                        )

            try:
                result = self._do_execute_operation(operation)
            except EnvCommandError as e:
                if e.e.returncode == -2:
                    result = -2
                else:
                    raise

            # A result of -2 means a KeyboardInterrupt occurred in one of the
            # Python subprocesses, so we raise KeyboardInterrupt here to be
            # picked up by the error handler.
            if result == -2:
                raise KeyboardInterrupt
        except Exception as e:
            try:
                from cleo.ui.exception_trace import ExceptionTrace

                if not self.supports_fancy_output():
                    io = self._io
                else:
                    message = (
                        "  <error>•</error>"
                        f" {self.get_operation_message(operation, error=True)}:"
                        " <error>Failed</error>")
                    self._write(operation, message)
                    io = self._sections.get(id(operation), self._io)

                with self._lock:
                    trace = ExceptionTrace(e)
                    trace.render(io)
                    io.write_line("")
            finally:
                with self._lock:
                    self._shutdown = True
        except KeyboardInterrupt:
            try:
                message = (
                    "  <warning>•</warning>"
                    f" {self.get_operation_message(operation, warning=True)}:"
                    " <warning>Cancelled</warning>")
                if not self.supports_fancy_output():
                    self._io.write_line(message)
                else:
                    self._write(operation, message)
            finally:
                with self._lock:
                    self._shutdown = True

    def _do_execute_operation(self, operation: Operation) -> int:
        method = operation.job_type

        operation_message = self.get_operation_message(operation)
        if operation.skipped:
            if self.supports_fancy_output():
                self._write(
                    operation,
                    f"  <fg=default;options=bold,dark>•</> {operation_message}: "
                    "<fg=default;options=bold,dark>Skipped</> "
                    "<fg=default;options=dark>for the following reason:</> "
                    f"<fg=default;options=bold,dark>{operation.skip_reason}</>",
                )

            self._skipped[operation.job_type] += 1

            return 0

        if not self._enabled or self._dry_run:
            self._io.write_line(
                f"  <fg=blue;options=bold>•</> {operation_message}")

            return 0

        result = getattr(self, f"_execute_{method}")(operation)

        if result != 0:
            return result

        operation_message = self.get_operation_message(operation, done=True)
        message = f"  <fg=green;options=bold>•</> {operation_message}"
        self._write(operation, message)

        self._increment_operations_count(operation, True)

        return result

    def _increment_operations_count(self, operation: Operation,
                                    executed: bool) -> None:
        with self._lock:
            if executed:
                self._executed_operations += 1
                self._executed[operation.job_type] += 1
            else:
                self._skipped[operation.job_type] += 1

    def run_pip(self, *args: Any, **kwargs: Any) -> int:
        try:
            self._env.run_pip(*args, **kwargs)
        except EnvCommandError as e:
            output = decode(e.e.output)
            if ("KeyboardInterrupt" in output
                    or "ERROR: Operation cancelled by user" in output):
                return -2

            raise

        return 0

    def get_operation_message(
        self,
        operation: Operation,
        done: bool = False,
        error: bool = False,
        warning: bool = False,
    ) -> str:
        base_tag = "fg=default"
        operation_color = "c2"
        source_operation_color = "c2"
        package_color = "c1"

        if error:
            operation_color = "error"
        elif warning:
            operation_color = "warning"
        elif done:
            operation_color = "success"

        if operation.skipped:
            base_tag = "fg=default;options=dark"
            operation_color += "_dark"
            source_operation_color += "_dark"
            package_color += "_dark"

        if operation.job_type == "install":
            return (
                f"<{base_tag}>Installing"
                f" <{package_color}>{operation.package.name}</{package_color}>"
                f" (<{operation_color}>{operation.package.full_pretty_version}</>)</>"
            )

        if operation.job_type == "uninstall":
            return (
                f"<{base_tag}>Removing"
                f" <{package_color}>{operation.package.name}</{package_color}>"
                f" (<{operation_color}>{operation.package.full_pretty_version}</>)</>"
            )

        if operation.job_type == "update":
            return (
                f"<{base_tag}>Updating"
                f" <{package_color}>{operation.initial_package.name}</{package_color}> "
                f"(<{source_operation_color}>"
                f"{operation.initial_package.full_pretty_version}"
                f"</{source_operation_color}> -> <{operation_color}>"
                f"{operation.target_package.full_pretty_version}</>)</>")
        return ""

    def _display_summary(self, operations: list[Operation]) -> None:
        installs = 0
        updates = 0
        uninstalls = 0
        skipped = 0
        for op in operations:
            if op.skipped:
                skipped += 1
                continue

            if op.job_type == "install":
                installs += 1
            elif op.job_type == "update":
                updates += 1
            elif op.job_type == "uninstall":
                uninstalls += 1

        if not installs and not updates and not uninstalls and not self._verbose:
            self._io.write_line("")
            self._io.write_line("No dependencies to install or update")

            return

        self._io.write_line("")
        self._io.write("<b>Package operations</b>: ")
        self._io.write(f"<info>{installs}</> install{pluralize(installs)}, ")
        self._io.write(f"<info>{updates}</> update{pluralize(updates)}, ")
        self._io.write(f"<info>{uninstalls}</> removal{pluralize(uninstalls)}")
        if skipped and self._verbose:
            self._io.write(f", <info>{skipped}</> skipped")
        self._io.write_line("")
        self._io.write_line("")

    def _execute_install(self, operation: Install | Update) -> int:
        status_code = self._install(operation)

        self._save_url_reference(operation)

        return status_code

    def _execute_update(self, operation: Install | Update) -> int:
        status_code = self._update(operation)

        self._save_url_reference(operation)

        return status_code

    def _execute_uninstall(self, operation: Uninstall) -> int:
        op_msg = self.get_operation_message(operation)
        message = f"  <fg=blue;options=bold>•</> {op_msg}: <info>Removing...</info>"
        self._write(operation, message)

        return self._remove(operation)

    def _install(self, operation: Install | Update) -> int:
        package = operation.package
        if package.source_type == "directory":
            return self._install_directory(operation)

        if package.source_type == "git":
            return self._install_git(operation)

        if package.source_type == "file":
            archive = self._prepare_file(operation)
        elif package.source_type == "url":
            archive = self._download_link(operation, Link(package.source_url))
        else:
            archive = self._download(operation)

        operation_message = self.get_operation_message(operation)
        message = (f"  <fg=blue;options=bold>•</> {operation_message}:"
                   " <info>Installing...</info>")
        self._write(operation, message)
        return self.pip_install(archive,
                                upgrade=operation.job_type == "update")

    def _update(self, operation: Install | Update) -> int:
        return self._install(operation)

    def _remove(self, operation: Uninstall) -> int:
        package = operation.package

        # If we have a VCS package, remove its source directory
        if package.source_type == "git":
            src_dir = self._env.path / "src" / package.name
            if src_dir.exists():
                remove_directory(src_dir, force=True)

        try:
            return self.run_pip("uninstall", package.name, "-y")
        except CalledProcessError as e:
            if "not installed" in str(e):
                return 0

            raise

    def _prepare_file(self, operation: Install | Update) -> Path:
        package = operation.package
        operation_message = self.get_operation_message(operation)

        message = (f"  <fg=blue;options=bold>•</> {operation_message}:"
                   " <info>Preparing...</info>")
        self._write(operation, message)

        archive = Path(package.source_url)
        if not Path(package.source_url).is_absolute() and package.root_dir:
            archive = package.root_dir / archive

        archive = self._chef.prepare(archive)

        return archive

    def _install_directory(self, operation: Install | Update) -> int:
        from poetry.factory import Factory

        package = operation.package
        operation_message = self.get_operation_message(operation)

        message = (f"  <fg=blue;options=bold>•</> {operation_message}:"
                   " <info>Building...</info>")
        self._write(operation, message)

        if package.root_dir:
            req = package.root_dir / package.source_url
        else:
            req = Path(package.source_url).resolve(strict=False)

        pyproject = PyProjectTOML(os.path.join(req, "pyproject.toml"))

        if pyproject.is_poetry_project():
            # Even if a build system is specified, some versions of pip
            # (< 19.0.0) don't understand it, so we need to check the pip
            # version to know whether we can rely on the build system.
            legacy_pip = (self._env.pip_version <
                          self._env.pip_version.__class__.from_parts(19, 0, 0))
            package_poetry = Factory().create_poetry(
                pyproject.file.path.parent)

            if package.develop and not package_poetry.package.build_script:
                from poetry.masonry.builders.editable import EditableBuilder

                # This is a Poetry package in editable mode
                # we can use the EditableBuilder without going through pip
                # to install it, unless it has a build script.
                builder = EditableBuilder(package_poetry, self._env, NullIO())
                builder.build()

                return 0
            elif legacy_pip or package_poetry.package.build_script:
                from poetry.core.masonry.builders.sdist import SdistBuilder

                # We need to rely on creating a temporary setup.py
                # file since the version of pip does not support
                # build-systems
                # We also need it for non-PEP-517 packages
                builder = SdistBuilder(package_poetry)

                with builder.setup_py():
                    if package.develop:
                        return self.pip_install(req,
                                                upgrade=True,
                                                editable=True)
                    return self.pip_install(req, upgrade=True)

        if package.develop:
            return self.pip_install(req, upgrade=True, editable=True)

        return self.pip_install(req, upgrade=True)

    def _install_git(self, operation: Install | Update) -> int:
        from poetry.vcs.git import Git

        package = operation.package
        operation_message = self.get_operation_message(operation)

        message = (
            f"  <fg=blue;options=bold>•</> {operation_message}: <info>Cloning...</info>"
        )
        self._write(operation, message)

        source = Git.clone(
            url=package.source_url,
            source_root=self._env.path / "src",
            revision=package.source_resolved_reference
            or package.source_reference,
        )

        # Now we just need to install from the source directory
        original_url = package.source_url
        package._source_url = str(source.path)

        status_code = self._install_directory(operation)

        package._source_url = original_url

        return status_code

    def _download(self, operation: Install | Update) -> Link:
        link = self._chooser.choose_for(operation.package)

        return self._download_link(operation, link)

    def _download_link(self, operation: Install | Update, link: Link) -> Link:
        package = operation.package

        archive = self._chef.get_cached_archive_for_link(link)
        if archive is link:
            # No cached distribution was found, so we download and prepare it
            try:
                archive = self._download_archive(operation, link)
            except BaseException:
                cache_directory = self._chef.get_cache_directory_for_link(link)
                cached_file = cache_directory.joinpath(link.filename)
                # We can't use unlink(missing_ok=True) because it's not available
                # prior to Python 3.8
                if cached_file.exists():
                    cached_file.unlink()

                raise

            # TODO: Check readability of the created archive

            if not link.is_wheel:
                archive = self._chef.prepare(archive)

        if package.files:
            archive_hash = self._validate_archive_hash(archive, package)

            self._hashes[package.name] = archive_hash

        return archive

    @staticmethod
    def _validate_archive_hash(archive: Path | Link, package: Package) -> str:
        archive_path = (url_to_path(archive.url)
                        if isinstance(archive, Link) else archive)
        file_dep = FileDependency(
            package.name,
            archive_path,
        )
        archive_hash = "sha256:" + file_dep.hash()
        known_hashes = {f["hash"] for f in package.files}

        if archive_hash not in known_hashes:
            raise RuntimeError(
                f"Hash for {package} from archive {archive_path.name} not found in"
                f" known hashes (was: {archive_hash})")

        return archive_hash
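        # For reference, FileDependency.hash() amounts to a sha256 digest of the
        # archive's bytes; an equivalent check (a sketch, not Poetry's code) is
        # "sha256:" + hashlib.sha256(archive_path.read_bytes()).hexdigest().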

    def _download_archive(self, operation: Install | Update,
                          link: Link) -> Path:
        response = self._authenticator.request("get",
                                               link.url,
                                               stream=True,
                                               io=self._sections.get(
                                                   id(operation), self._io))
        wheel_size = response.headers.get("content-length")
        operation_message = self.get_operation_message(operation)
        message = (
            f"  <fg=blue;options=bold>•</> {operation_message}: <info>Downloading...</>"
        )
        progress = None
        if self.supports_fancy_output():
            if wheel_size is None:
                self._write(operation, message)
            else:
                from cleo.ui.progress_bar import ProgressBar

                progress = ProgressBar(self._sections[id(operation)],
                                       max=int(wheel_size))
                progress.set_format(message + " <b>%percent%%</b>")

        if progress:
            with self._lock:
                self._sections[id(operation)].clear()
                progress.start()

        done = 0
        archive = self._chef.get_cache_directory_for_link(link) / link.filename
        archive.parent.mkdir(parents=True, exist_ok=True)
        with archive.open("wb") as f:
            for chunk in response.iter_content(chunk_size=4096):
                if not chunk:
                    break

                done += len(chunk)

                if progress:
                    with self._lock:
                        progress.set_progress(done)

                f.write(chunk)

        if progress:
            with self._lock:
                progress.finish()

        return archive

    def _should_write_operation(self, operation: Operation) -> bool:
        return not operation.skipped or self._dry_run or self._verbose

    def _save_url_reference(self, operation: Operation) -> None:
        """
        Create and store a PEP-610 `direct_url.json` file, if needed.
        """
        if operation.job_type not in {"install", "update"}:
            return

        package = operation.package

        if not package.source_url or package.source_type == "legacy":
            # Since we are installing from our own distribution cache
            # pip will write a `direct_url.json` file pointing to the cache
            # distribution.
            # That's not what we want, so we remove the direct_url.json file,
            # if it exists.
            for (
                    direct_url_json
            ) in self._env.site_packages.find_distribution_direct_url_json_files(
                    distribution_name=package.name, writable_only=True):
                # We can't use unlink(missing_ok=True) because it's not always available
                if direct_url_json.exists():
                    direct_url_json.unlink()
            return

        url_reference = None

        if package.source_type == "git":
            url_reference = self._create_git_url_reference(package)
        elif package.source_type == "url":
            url_reference = self._create_url_url_reference(package)
        elif package.source_type == "directory":
            url_reference = self._create_directory_url_reference(package)
        elif package.source_type == "file":
            url_reference = self._create_file_url_reference(package)

        if url_reference:
            for dist in self._env.site_packages.distributions(
                    name=package.name, writable_only=True):
                dist._path.joinpath("direct_url.json").write_text(
                    json.dumps(url_reference),
                    encoding="utf-8",
                )

                record = dist._path.joinpath("RECORD")
                if record.exists():
                    with record.open(mode="a", encoding="utf-8") as f:
                        writer = csv.writer(f)
                        writer.writerow([
                            str(
                                dist._path.joinpath(
                                    "direct_url.json").relative_to(
                                        record.parent.parent)),
                            "",
                            "",
                        ])

    def _create_git_url_reference(
            self, package: Package) -> dict[str, str | dict[str, str]]:
        reference = {
            "url": package.source_url,
            "vcs_info": {
                "vcs": "git",
                "requested_revision": package.source_reference,
                "commit_id": package.source_resolved_reference,
            },
        }

        return reference

    def _create_url_url_reference(
            self, package: Package) -> dict[str, str | dict[str, str]]:
        archive_info = {}

        if package.name in self._hashes:
            archive_info["hash"] = self._hashes[package.name]

        reference = {"url": package.source_url, "archive_info": archive_info}

        return reference

    def _create_file_url_reference(
            self, package: Package) -> dict[str, str | dict[str, str]]:
        archive_info = {}

        if package.name in self._hashes:
            archive_info["hash"] = self._hashes[package.name]

        reference = {
            "url": Path(package.source_url).as_uri(),
            "archive_info": archive_info,
        }

        return reference

    def _create_directory_url_reference(
            self, package: Package) -> dict[str, str | dict[str, str]]:
        dir_info = {}

        if package.develop:
            dir_info["editable"] = True

        reference = {
            "url": Path(package.source_url).as_uri(),
            "dir_info": dir_info,
        }

        return reference
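
Taken together, the reference dicts above are what _save_url_reference serializes into a PEP-610 direct_url.json next to the installed distribution's metadata. A minimal illustration of such a payload for a git install (the URL and revisions are placeholders, not real output):

import json

# Shaped like the dict _create_git_url_reference returns; values are placeholders.
reference = {
    "url": "https://github.com/python-poetry/poetry.git",
    "vcs_info": {
        "vcs": "git",
        "requested_revision": "main",
        "commit_id": "0000000000000000000000000000000000000000",
    },
}

print(json.dumps(reference))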