Example #1
    def create_poetry(
        self,
        cwd: Optional[Path] = None,
        io: Optional["IO"] = None,
        disable_plugins: bool = False,
    ) -> Poetry:
        if io is None:
            io = NullIO()

        base_poetry = super().create_poetry(cwd)

        locker = Locker(base_poetry.file.parent / "poetry.lock",
                        base_poetry.local_config)

        # Loading global configuration
        config = self.create_config(io)

        # Loading local configuration
        local_config_file = TOMLFile(base_poetry.file.parent / "poetry.toml")
        if local_config_file.exists():
            if io.is_debug():
                io.write_line(
                    f"Loading configuration file {local_config_file.path}")

            config.merge(local_config_file.read())

        # Load local sources
        repositories = {}
        existing_repositories = config.get("repositories", {})
        for source in base_poetry.pyproject.poetry_config.get("source", []):
            name = source.get("name")
            url = source.get("url")
            if name and url and name not in existing_repositories:
                repositories[name] = {"url": url}

        config.merge({"repositories": repositories})

        poetry = Poetry(
            base_poetry.file.path,
            base_poetry.local_config,
            base_poetry.package,
            locker,
            config,
        )

        # Configuring sources
        self.configure_sources(poetry, poetry.local_config.get("source", []),
                               config, io)

        plugin_manager = PluginManager("plugin",
                                       disable_plugins=disable_plugins)
        plugin_manager.load_plugins()
        poetry.set_plugin_manager(plugin_manager)
        plugin_manager.activate(poetry, io)

        return poetry
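The factory above wires a Locker to poetry.lock and merges an optional poetry.toml into the loaded configuration. Below is a minimal sketch of just that TOMLFile exists/read/merge step, reduced to the calls shown above; the project path is a hypothetical stand-in and the Config import path is assumed (TOMLFile is imported as in Example #25).

from pathlib import Path

from poetry.config.config import Config  # assumed import path
from poetry.core.toml.file import TOMLFile

config = Config()
local_config_file = TOMLFile(Path("/path/to/project") / "poetry.toml")  # hypothetical location
if local_config_file.exists():
    # read() returns the parsed TOML document, which Config.merge folds in
    config.merge(local_config_file.read())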
Example #2
def test_validate_fails():
    complete = TOMLFile(fixtures_dir / "complete.toml")
    content = complete.read()["tool"]["poetry"]
    content["this key is not in the schema"] = ""

    expected = (
        "Additional properties are not allowed "
        "('this key is not in the schema' was unexpected)"
    )

    assert Factory.validate(content) == {"errors": [expected], "warnings": []}
Example #3
    def create_config(cls, io: Optional["IO"] = None) -> Config:
        if io is None:
            io = NullIO()

        config = Config()
        # Load global config
        config_file = TOMLFile(Path(CONFIG_DIR) / "config.toml")
        if config_file.exists():
            if io.is_debug():
                io.write_line(
                    f"<debug>Loading configuration file {config_file.path}</debug>"
                )

            config.merge(config_file.read())

        config.set_config_source(FileConfigSource(config_file))

        # Load global auth config
        auth_config_file = TOMLFile(Path(CONFIG_DIR) / "auth.toml")
        if auth_config_file.exists():
            if io.is_debug():
                io.write_line(
                    f"<debug>Loading configuration file {auth_config_file.path}</debug>"
                )

            config.merge(auth_config_file.read())

        config.set_auth_config_source(FileConfigSource(auth_config_file))

        return config
Example #4
def test_activate_activates_non_existing_virtualenv_no_envs_file(
    tmp_dir: str,
    manager: EnvManager,
    poetry: Poetry,
    config: Config,
    mocker: MockerFixture,
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )
    mocker.patch(
        "subprocess.Popen.communicate",
        side_effect=[("/prefix", None), ("/prefix", None)],
    )
    m = mocker.patch("poetry.utils.env.EnvManager.build_venv",
                     side_effect=build_venv)

    env = manager.activate("python3.7", NullIO())
    venv_name = EnvManager.generate_env_name("simple-project",
                                             str(poetry.file.parent))

    m.assert_called_with(
        Path(tmp_dir) / f"{venv_name}-py3.7",
        executable="/usr/bin/python3.7",
        flags={
            "always-copy": False,
            "system-site-packages": False,
            "no-pip": False,
            "no-setuptools": False,
        },
        prompt="simple-project-py3.7",
    )

    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.7"
    assert env.base == Path("/prefix")
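The test above exercises the envs.toml registry through TOMLFile. A minimal sketch of that write/read round-trip in isolation, using a temporary directory and an invented environment name in place of the fixtures:

import tomlkit
from pathlib import Path
from tempfile import TemporaryDirectory

from poetry.core.toml.file import TOMLFile

with TemporaryDirectory() as tmp_dir:
    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")

    doc = tomlkit.document()
    doc["simple-project-abc123"] = {"minor": "3.7", "patch": "3.7.1"}  # hypothetical env name
    envs_file.write(doc)

    assert envs_file.exists()
    assert envs_file.read()["simple-project-abc123"]["patch"] == "3.7.1"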
Example #5
    def __init__(self, lock: str | Path, local_config: dict):
        self._lock = TOMLFile(lock)
        self._local_config = local_config
        self._lock_data = None
        self._content_hash = self._get_content_hash()
        self._locked = False
        self._lock_data = None
        self._write = False
Example #6
File: helpers.py Project: yosmoc/poetry
    def __init__(self, lock, local_config):  # noqa
        self._lock = TOMLFile(lock)
        self._local_config = local_config
        self._lock_data = None
        self._content_hash = self._get_content_hash()
        self._locked = False
        self._lock_data = None
        self._write = False
Example #7
def test_deactivate_activated(
    tmp_dir: str,
    manager: EnvManager,
    poetry: Poetry,
    config: Config,
    mocker: MockerFixture,
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    venv_name = manager.generate_env_name("simple-project",
                                          str(poetry.file.parent))
    version = Version.from_parts(*sys.version_info[:3])
    other_version = Version.parse(
        "3.4") if version.major == 2 else version.next_minor()
    (Path(tmp_dir) / f"{venv_name}-py{version.major}.{version.minor}").mkdir()
    (Path(tmp_dir) /
     f"{venv_name}-py{other_version.major}.{other_version.minor}").mkdir()

    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {
        "minor": f"{other_version.major}.{other_version.minor}",
        "patch": other_version.text,
    }
    envs_file.write(doc)

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )

    manager.deactivate(NullIO())
    env = manager.get()

    assert env.path == Path(
        tmp_dir) / f"{venv_name}-py{version.major}.{version.minor}"
    assert Path("/prefix")

    envs = envs_file.read()
    assert len(envs) == 0
Example #8
def test_activate_with_in_project_setting_does_not_fail_if_no_venvs_dir(
    manager: EnvManager,
    poetry: Poetry,
    config: Config,
    tmp_dir: str,
    mocker: MockerFixture,
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    config.merge({
        "virtualenvs": {
            "path": str(Path(tmp_dir) / "virtualenvs"),
            "in-project": True,
        }
    })

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )
    mocker.patch(
        "subprocess.Popen.communicate",
        side_effect=[("/prefix", None), ("/prefix", None)],
    )
    m = mocker.patch("poetry.utils.env.EnvManager.build_venv")

    manager.activate("python3.7", NullIO())

    m.assert_called_with(
        poetry.file.parent / ".venv",
        executable="/usr/bin/python3.7",
        flags={
            "always-copy": False,
            "system-site-packages": False,
            "no-pip": False,
            "no-setuptools": False,
        },
        prompt="simple-project-py3.7",
    )

    envs_file = TOMLFile(Path(tmp_dir) / "virtualenvs" / "envs.toml")
    assert not envs_file.exists()
Example #9
def test_activate_activates_non_existing_virtualenv_no_envs_file(
    mocker: MockerFixture,
    tester: CommandTester,
    venv_cache: Path,
    venv_name: str,
    venvs_in_cache_config: None,
):
    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )

    mock_build_env = mocker.patch("poetry.utils.env.EnvManager.build_venv",
                                  side_effect=build_venv)

    tester.execute("3.7")

    venv_py37 = venv_cache / f"{venv_name}-py3.7"
    mock_build_env.assert_called_with(
        venv_py37,
        executable="/usr/bin/python3.7",
        flags={
            "always-copy": False,
            "system-site-packages": False,
            "no-pip": False,
            "no-setuptools": False,
        },
        prompt="simple-project-py3.7",
    )

    envs_file = TOMLFile(venv_cache / "envs.toml")
    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    expected = f"""\
Creating virtualenv {venv_py37.name} in {venv_py37.parent}
Using virtualenv: {venv_py37}
"""

    assert tester.io.fetch_output() == expected
Example #10
def test_activate_activates_non_existing_virtualenv_no_envs_file(
        mocker, tester, venv_cache, venv_name, venvs_in_cache_config):
    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )

    mock_build_env = mocker.patch("poetry.utils.env.EnvManager.build_venv",
                                  side_effect=build_venv)

    tester.execute("3.7")

    venv_py37 = venv_cache / "{}-py3.7".format(venv_name)
    mock_build_env.assert_called_with(
        venv_py37,
        executable="python3.7",
        flags={
            "always-copy": False,
            "system-site-packages": False
        },
        with_pip=True,
        with_setuptools=True,
        with_wheel=True,
    )

    envs_file = TOMLFile(venv_cache / "envs.toml")
    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    expected = """\
Creating virtualenv {} in {}
Using virtualenv: {}
""".format(
        venv_py37.name,
        venv_py37.parent,
        venv_py37,
    )

    assert expected == tester.io.fetch_output()
Example #11
def test_get_prefers_explicitly_activated_virtualenvs_over_env_var(
        tester, current_python, venv_cache, venv_name, venvs_in_cache_config):
    os.environ["VIRTUAL_ENV"] = "/environment/prefix"

    python_minor = ".".join(str(v) for v in current_python[:2])
    python_patch = ".".join(str(v) for v in current_python[:3])
    venv_dir = venv_cache / "{}-py{}".format(venv_name, python_minor)
    venv_dir.mkdir(parents=True, exist_ok=True)

    envs_file = TOMLFile(venv_cache / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {"minor": python_minor, "patch": python_patch}
    envs_file.write(doc)

    tester.execute(python_minor)

    expected = """\
Using virtualenv: {}
""".format(venv_dir)

    assert expected == tester.io.fetch_output()
Example #12
def test_activate_activates_existing_virtualenv_no_envs_file(
    tmp_dir: str,
    manager: EnvManager,
    poetry: Poetry,
    config: Config,
    mocker: MockerFixture,
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    venv_name = manager.generate_env_name("simple-project",
                                          str(poetry.file.parent))

    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.7"))

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )
    mocker.patch(
        "subprocess.Popen.communicate",
        side_effect=[("/prefix", None)],
    )
    m = mocker.patch("poetry.utils.env.EnvManager.build_venv",
                     side_effect=build_venv)

    env = manager.activate("python3.7", NullIO())

    m.assert_not_called()

    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.7"
    assert env.base == Path("/prefix")
Example #13
def test_deactivate_activated(tmp_dir, manager, poetry, config, mocker):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
    version = Version.parse(".".join(str(c) for c in sys.version_info[:3]))
    other_version = Version.parse("3.4") if version.major == 2 else version.next_minor
    (
        Path(tmp_dir) / "{}-py{}.{}".format(venv_name, version.major, version.minor)
    ).mkdir()
    (
        Path(tmp_dir)
        / "{}-py{}.{}".format(venv_name, other_version.major, other_version.minor)
    ).mkdir()

    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {
        "minor": "{}.{}".format(other_version.major, other_version.minor),
        "patch": other_version.text,
    }
    envs_file.write(doc)

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "poetry.utils._compat.subprocess.check_output",
        side_effect=check_output_wrapper(),
    )

    manager.deactivate(NullIO())
    env = manager.get()

    assert env.path == Path(tmp_dir) / "{}-py{}.{}".format(
        venv_name, version.major, version.minor
    )
    assert Path("/prefix")

    envs = envs_file.read()
    assert len(envs) == 0
Example #14
def test_activate_activates_recreates_for_different_patch(
        tmp_dir, manager, poetry, config, mocker):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    venv_name = manager.generate_env_name("simple-project",
                                          str(poetry.file.parent))
    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {"minor": "3.7", "patch": "3.7.0"}
    envs_file.write(doc)

    os.mkdir(os.path.join(tmp_dir, "{}-py3.7".format(venv_name)))

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )
    mocker.patch(
        "subprocess.Popen.communicate",
        side_effect=[
            ("/prefix", None),
            ('{"version_info": [3, 7, 0]}', None),
            ("/prefix", None),
            ("/prefix", None),
            ("/prefix", None),
        ],
    )
    build_venv_m = mocker.patch("poetry.utils.env.EnvManager.build_venv",
                                side_effect=build_venv)
    remove_venv_m = mocker.patch("poetry.utils.env.EnvManager.remove_venv",
                                 side_effect=EnvManager.remove_venv)

    env = manager.activate("python3.7", NullIO())

    build_venv_m.assert_called_with(
        Path(tmp_dir) / "{}-py3.7".format(venv_name),
        executable="python3.7",
        flags={
            "always-copy": False,
            "system-site-packages": False
        },
        with_pip=True,
        with_setuptools=True,
        with_wheel=True,
    )
    remove_venv_m.assert_called_with(
        Path(tmp_dir) / "{}-py3.7".format(venv_name))

    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    assert env.path == Path(tmp_dir) / "{}-py3.7".format(venv_name)
    assert env.base == Path("/prefix")
    assert (Path(tmp_dir) / "{}-py3.7".format(venv_name)).exists()
Example #15
def test_remove_also_deactivates(tmp_dir, manager, poetry, config, mocker):
    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
    (Path(tmp_dir) / "{}-py3.7".format(venv_name)).mkdir()
    (Path(tmp_dir) / "{}-py3.6".format(venv_name)).mkdir()

    mocker.patch(
        "poetry.utils._compat.subprocess.check_output",
        side_effect=check_output_wrapper(Version.parse("3.6.6")),
    )

    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {"minor": "3.6", "patch": "3.6.6"}
    envs_file.write(doc)

    venv = manager.remove("python3.6")

    assert (Path(tmp_dir) / "{}-py3.6".format(venv_name)) == venv.path
    assert not (Path(tmp_dir) / "{}-py3.6".format(venv_name)).exists()

    envs = envs_file.read()
    assert venv_name not in envs
Example #16
def test_activate_activates_non_existing_virtualenv_no_envs_file(
        tmp_dir, manager, poetry, config, mocker):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "poetry.utils._compat.subprocess.check_output",
        side_effect=check_output_wrapper(),
    )
    mocker.patch(
        "poetry.utils._compat.subprocess.Popen.communicate",
        side_effect=[("/prefix", None), ("/prefix", None)],
    )
    m = mocker.patch("poetry.utils.env.EnvManager.build_venv",
                     side_effect=build_venv)

    env = manager.activate("python3.7", NullIO())
    venv_name = EnvManager.generate_env_name("simple-project",
                                             str(poetry.file.parent))

    m.assert_called_with(
        Path(tmp_dir) / "{}-py3.7".format(venv_name),
        executable="python3.7",
        flags={"always-copy": False},
    )

    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    assert env.path == Path(tmp_dir) / "{}-py3.7".format(venv_name)
    assert env.base == Path("/prefix")
Example #17
def test_activate_activates_different_virtualenv_with_envs_file(
    tmp_dir: str,
    manager: EnvManager,
    poetry: "Poetry",
    config: "Config",
    mocker: "MockerFixture",
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    venv_name = manager.generate_env_name("simple-project",
                                          str(poetry.file.parent))
    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {"minor": "3.7", "patch": "3.7.1"}
    envs_file.write(doc)

    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.7"))

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(Version.parse("3.6.6")),
    )
    mocker.patch(
        "subprocess.Popen.communicate",
        side_effect=[("/prefix", None), ("/prefix", None), ("/prefix", None)],
    )
    m = mocker.patch("poetry.utils.env.EnvManager.build_venv",
                     side_effect=build_venv)

    env = manager.activate("python3.6", NullIO())

    m.assert_called_with(
        Path(tmp_dir) / f"{venv_name}-py3.6",
        executable="python3.6",
        flags={
            "always-copy": False,
            "system-site-packages": False
        },
        with_pip=True,
        with_setuptools=True,
        with_wheel=True,
    )

    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.6"
    assert envs[venv_name]["patch"] == "3.6.6"

    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.6"
    assert env.base == Path("/prefix")
Example #18
File: test_env.py Project: tony/poetry
def test_activate_does_not_recreate_when_switching_minor(
    tmp_dir: str,
    manager: EnvManager,
    poetry: "Poetry",
    config: "Config",
    mocker: "MockerFixture",
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {"minor": "3.7", "patch": "3.7.0"}
    envs_file.write(doc)

    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.7"))
    os.mkdir(os.path.join(tmp_dir, f"{venv_name}-py3.6"))

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(Version.parse("3.6.6")),
    )
    mocker.patch(
        "subprocess.Popen.communicate",
        side_effect=[("/prefix", None), ("/prefix", None), ("/prefix", None)],
    )
    build_venv_m = mocker.patch(
        "poetry.utils.env.EnvManager.build_venv", side_effect=build_venv
    )
    remove_venv_m = mocker.patch(
        "poetry.utils.env.EnvManager.remove_venv", side_effect=EnvManager.remove_venv
    )

    env = manager.activate("python3.6", NullIO())

    build_venv_m.assert_not_called()
    remove_venv_m.assert_not_called()

    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.6"
    assert envs[venv_name]["patch"] == "3.6.6"

    assert env.path == Path(tmp_dir) / f"{venv_name}-py3.6"
    assert env.base == Path("/prefix")
    assert (Path(tmp_dir) / f"{venv_name}-py3.6").exists()
Example #19
    def deactivate(self, io: IO) -> None:
        venv_path = self._poetry.config.get("virtualenvs.path")
        if venv_path is None:
            venv_path = Path(CACHE_DIR) / "virtualenvs"
        else:
            venv_path = Path(venv_path)

        name = self._poetry.package.name
        name = self.generate_env_name(name, str(self._poetry.file.parent))

        envs_file = TOMLFile(venv_path / self.ENVS_FILE)
        if envs_file.exists():
            envs = envs_file.read()
            env = envs.get(name)
            if env is not None:
                io.write_line(
                    "Deactivating virtualenv: <comment>{}</comment>".format(
                        venv_path / (name + "-py{}".format(env["minor"]))))
                del envs[name]

                envs_file.write(envs)
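The deactivate method removes the project's entry from envs.toml and writes the document back through the same TOMLFile handle. A condensed sketch of that delete-and-rewrite step, with a hypothetical cache path and environment name standing in for the real values:

from pathlib import Path

from poetry.core.toml.file import TOMLFile

envs_file = TOMLFile(Path("/path/to/cache/virtualenvs") / "envs.toml")  # hypothetical path
name = "simple-project-abc123"  # hypothetical env name
if envs_file.exists():
    envs = envs_file.read()
    if envs.get(name) is not None:
        del envs[name]
        envs_file.write(envs)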
Example #20
def test_activate_activates_different_virtualenv_with_envs_file(
    tmp_dir, manager, poetry, config, mocker
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))
    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {"minor": "3.7", "patch": "3.7.1"}
    envs_file.write(doc)

    os.mkdir(os.path.join(tmp_dir, "{}-py3.7".format(venv_name)))

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "poetry.utils._compat.subprocess.check_output",
        side_effect=check_output_wrapper(Version.parse("3.6.6")),
    )
    mocker.patch(
        "poetry.utils._compat.subprocess.Popen.communicate",
        side_effect=[("/prefix", None), ("/prefix", None), ("/prefix", None)],
    )
    m = mocker.patch("poetry.utils.env.EnvManager.build_venv", side_effect=build_venv)

    env = manager.activate("python3.6", NullIO())

    m.assert_called_with(
        Path(tmp_dir) / "{}-py3.6".format(venv_name), executable="python3.6"
    )

    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.6"
    assert envs[venv_name]["patch"] == "3.6.6"

    assert env.path == Path(tmp_dir) / "{}-py3.6".format(venv_name)
    assert env.base == Path("/prefix")
Example #21
    def __init__(self, lock: Union[str, Path], local_config: dict) -> None:
        self._lock = TOMLFile(lock)
        self._local_config = local_config
        self._lock_data = None
        self._content_hash = self._get_content_hash()
Example #22
class Locker:

    _VERSION = "1.1"

    _relevant_keys = ["dependencies", "group", "source", "extras"]

    def __init__(self, lock: Union[str, Path], local_config: dict) -> None:
        self._lock = TOMLFile(lock)
        self._local_config = local_config
        self._lock_data = None
        self._content_hash = self._get_content_hash()

    @property
    def lock(self) -> TOMLFile:
        return self._lock

    @property
    def lock_data(self) -> "TOMLDocument":
        if self._lock_data is None:
            self._lock_data = self._get_lock_data()

        return self._lock_data

    def is_locked(self) -> bool:
        """
        Checks whether the locker has been locked (lockfile found).
        """
        if not self._lock.exists():
            return False

        return "package" in self.lock_data

    def is_fresh(self) -> bool:
        """
        Checks whether the lock file is still up to date with the current hash.
        """
        lock = self._lock.read()
        metadata = lock.get("metadata", {})

        if "content-hash" in metadata:
            return self._content_hash == lock["metadata"]["content-hash"]

        return False

    def locked_repository(
            self,
            with_dev_reqs: bool = False) -> poetry.repositories.Repository:
        """
        Searches and returns a repository of locked packages.
        """
        from poetry.factory import Factory

        if not self.is_locked():
            return poetry.repositories.Repository()

        lock_data = self.lock_data
        packages = poetry.repositories.Repository()

        if with_dev_reqs:
            locked_packages = lock_data["package"]
        else:
            locked_packages = [
                p for p in lock_data["package"] if p["category"] == "main"
            ]

        if not locked_packages:
            return packages

        for info in locked_packages:
            source = info.get("source", {})
            source_type = source.get("type")
            url = source.get("url")
            if source_type in ["directory", "file"]:
                url = self._lock.path.parent.joinpath(url).resolve().as_posix()

            package = Package(
                info["name"],
                info["version"],
                info["version"],
                source_type=source_type,
                source_url=url,
                source_reference=source.get("reference"),
                source_resolved_reference=source.get("resolved_reference"),
            )
            package.description = info.get("description", "")
            package.category = info.get("category", "main")
            package.groups = info.get("groups", ["default"])
            package.optional = info["optional"]
            if "hashes" in lock_data["metadata"]:
                # Old lock so we create dummy files from the hashes
                package.files = [{
                    "name": h,
                    "hash": h
                } for h in lock_data["metadata"]["hashes"][info["name"]]]
            else:
                package.files = lock_data["metadata"]["files"][info["name"]]

            package.python_versions = info["python-versions"]
            extras = info.get("extras", {})
            if extras:
                for name, deps in extras.items():
                    package.extras[name] = []

                    for dep in deps:
                        try:
                            dependency = Dependency.create_from_pep_508(dep)
                        except InvalidRequirement:
                            # handle lock files with invalid PEP 508
                            m = re.match(
                                r"^(.+?)(?:\[(.+?)])?(?:\s+\((.+)\))?$", dep)
                            dep_name = m.group(1)
                            extras = m.group(2) or ""
                            constraint = m.group(3) or "*"
                            dependency = Dependency(dep_name,
                                                    constraint,
                                                    extras=extras.split(","))
                        package.extras[name].append(dependency)

            if "marker" in info:
                package.marker = parse_marker(info["marker"])
            else:
                # Compatibility for old locks
                if "requirements" in info:
                    dep = Dependency("foo", "0.0.0")
                    for name, value in info["requirements"].items():
                        if name == "python":
                            dep.python_versions = value
                        elif name == "platform":
                            dep.platform = value

                    split_dep = dep.to_pep_508(False).split(";")
                    if len(split_dep) > 1:
                        package.marker = parse_marker(split_dep[1].strip())

            for dep_name, constraint in info.get("dependencies", {}).items():

                root_dir = self._lock.path.parent
                if package.source_type == "directory":
                    # root dir should be the source of the package relative to the lock path
                    root_dir = Path(package.source_url)

                if isinstance(constraint, list):
                    for c in constraint:
                        package.add_dependency(
                            Factory.create_dependency(dep_name,
                                                      c,
                                                      root_dir=root_dir))

                    continue

                package.add_dependency(
                    Factory.create_dependency(dep_name,
                                              constraint,
                                              root_dir=root_dir))

            if "develop" in info:
                package.develop = info["develop"]

            packages.add_package(package)

        return packages

    @staticmethod
    def __get_locked_package(
            _dependency: Dependency,
            packages_by_name: Dict[str, List[Package]]) -> Optional[Package]:
        """
        Internal helper to identify corresponding locked package using dependency
        version constraints.
        """
        for _package in packages_by_name.get(_dependency.name, []):
            if _dependency.constraint.allows(_package.version):
                return _package
        return None

    @classmethod
    def __walk_dependency_level(
        cls,
        dependencies: List[Dependency],
        level: int,
        pinned_versions: bool,
        packages_by_name: Dict[str, List[Package]],
        project_level_dependencies: Set[str],
        nested_dependencies: Dict[Tuple[str, str], Dependency],
    ) -> Dict[Tuple[str, str], Dependency]:
        if not dependencies:
            return nested_dependencies

        next_level_dependencies = []

        for requirement in dependencies:
            key = (requirement.name, requirement.pretty_constraint)
            locked_package = cls.__get_locked_package(requirement,
                                                      packages_by_name)

            if locked_package:
                # create dependency from locked package to retain dependency metadata
                # if this is not done, we can end-up with incorrect nested dependencies
                marker = requirement.marker
                requirement = locked_package.to_dependency()
                requirement.marker = requirement.marker.intersect(marker)

                key = (requirement.name, requirement.pretty_constraint)

                if pinned_versions:
                    requirement.set_constraint(
                        locked_package.to_dependency().constraint)

                if key not in nested_dependencies:
                    for require in locked_package.requires:
                        if require.marker.is_empty():
                            require.marker = requirement.marker
                        else:
                            require.marker = require.marker.intersect(
                                requirement.marker)

                        require.marker = require.marker.intersect(
                            locked_package.marker)
                        next_level_dependencies.append(require)

            if requirement.name in project_level_dependencies and level == 0:
                # project level dependencies take precedence
                continue

            if not locked_package:
                # we make a copy to avoid any side-effects
                requirement = deepcopy(requirement)

            if key not in nested_dependencies:
                nested_dependencies[key] = requirement
            else:
                nested_dependencies[key].marker = nested_dependencies[
                    key].marker.union(requirement.marker)

        return cls.__walk_dependency_level(
            dependencies=next_level_dependencies,
            level=level + 1,
            pinned_versions=pinned_versions,
            packages_by_name=packages_by_name,
            project_level_dependencies=project_level_dependencies,
            nested_dependencies=nested_dependencies,
        )

    @classmethod
    def get_project_dependencies(
        cls,
        project_requires: List[Dependency],
        locked_packages: List[Package],
        pinned_versions: bool = False,
        with_nested: bool = False,
    ) -> Iterable[Dependency]:
        # group packages entries by name, this is required because requirement might use different constraints
        packages_by_name = {}
        for pkg in locked_packages:
            if pkg.name not in packages_by_name:
                packages_by_name[pkg.name] = []
            packages_by_name[pkg.name].append(pkg)

        project_level_dependencies = set()
        dependencies = []

        for dependency in project_requires:
            dependency = deepcopy(dependency)
            locked_package = cls.__get_locked_package(dependency,
                                                      packages_by_name)
            if locked_package:
                locked_dependency = locked_package.to_dependency()
                locked_dependency.marker = dependency.marker.intersect(
                    locked_package.marker)

                if not pinned_versions:
                    locked_dependency.set_constraint(dependency.constraint)

                dependency = locked_dependency

            project_level_dependencies.add(dependency.name)
            dependencies.append(dependency)

        if not with_nested:
            # return only with project level dependencies
            return dependencies

        nested_dependencies = cls.__walk_dependency_level(
            dependencies=dependencies,
            level=0,
            pinned_versions=pinned_versions,
            packages_by_name=packages_by_name,
            project_level_dependencies=project_level_dependencies,
            nested_dependencies=dict(),
        )

        # Merge same dependencies using marker union
        for requirement in dependencies:
            key = (requirement.name, requirement.pretty_constraint)
            if key not in nested_dependencies:
                nested_dependencies[key] = requirement
            else:
                nested_dependencies[key].marker = nested_dependencies[
                    key].marker.union(requirement.marker)

        return sorted(nested_dependencies.values(),
                      key=lambda x: x.name.lower())

    def get_project_dependency_packages(
        self,
        project_requires: List[Dependency],
        dev: bool = False,
        extras: Optional[Union[bool, Sequence[str]]] = None,
    ) -> Iterator[DependencyPackage]:
        repository = self.locked_repository(with_dev_reqs=dev)

        # Build a set of all packages required by our selected extras
        extra_package_names = (None if (isinstance(extras, bool)
                                        and extras is True) else ())

        if extra_package_names is not None:
            extra_package_names = set(
                get_extra_package_names(
                    repository.packages,
                    self.lock_data.get("extras", {}),
                    extras or (),
                ))

        # If a package is optional and we haven't opted in to it, do not select
        selected = []
        for dependency in project_requires:
            try:
                package = repository.find_packages(dependency=dependency)[0]
            except IndexError:
                continue

            if extra_package_names is not None and (
                    package.optional
                    and package.name not in extra_package_names):
                # a package is locked as optional, but is not activated via extras
                continue

            selected.append(dependency)

        for dependency in self.get_project_dependencies(
                project_requires=selected,
                locked_packages=repository.packages,
                with_nested=True,
        ):
            try:
                package = repository.find_packages(dependency=dependency)[0]
            except IndexError:
                continue

            for extra in dependency.extras:
                package.requires_extras.append(extra)

            yield DependencyPackage(dependency=dependency, package=package)

    def set_lock_data(self, root: Package, packages: List[Package]) -> bool:
        files = table()
        packages = self._lock_packages(packages)
        # Retrieving hashes
        for package in packages:
            if package["name"] not in files:
                files[package["name"]] = []

            for f in package["files"]:
                file_metadata = inline_table()
                for k, v in sorted(f.items()):
                    file_metadata[k] = v

                files[package["name"]].append(file_metadata)

            if files[package["name"]]:
                files[package["name"]] = item(
                    files[package["name"]]).multiline(True)

            del package["files"]

        lock = document()
        lock["package"] = packages

        if root.extras:
            lock["extras"] = {
                extra: [dep.pretty_name for dep in deps]
                for extra, deps in sorted(root.extras.items())
            }

        lock["metadata"] = dict([
            ("lock-version", self._VERSION),
            ("python-versions", root.python_versions),
            ("content-hash", self._content_hash),
            ("files", files),
        ])

        if not self.is_locked() or lock != self.lock_data:
            self._write_lock_data(lock)

            return True

        return False

    def _write_lock_data(self, data: "TOMLDocument") -> None:
        self.lock.write(data)

        # Checking lock file data consistency
        if data != self.lock.read():
            raise RuntimeError("Inconsistent lock file data.")

        self._lock_data = None

    def _get_content_hash(self) -> str:
        """
        Returns the sha256 hash of the sorted content of the pyproject file.
        """
        content = self._local_config

        relevant_content = {}
        for key in self._relevant_keys:
            relevant_content[key] = content.get(key)

        content_hash = sha256(
            json.dumps(relevant_content, sort_keys=True).encode()).hexdigest()

        return content_hash

    def _get_lock_data(self) -> "TOMLDocument":
        if not self._lock.exists():
            raise RuntimeError(
                "No lockfile found. Unable to read locked packages")

        try:
            lock_data = self._lock.read()
        except TOMLKitError as e:
            raise RuntimeError(f"Unable to read the lock file ({e}).")

        lock_version = Version.parse(lock_data["metadata"].get(
            "lock-version", "1.0"))
        current_version = Version.parse(self._VERSION)
        # We expect the locker to be able to read lock files
        # from the same semantic versioning range
        accepted_versions = parse_constraint("^{}".format(
            Version.from_parts(current_version.major, 0)))
        lock_version_allowed = accepted_versions.allows(lock_version)
        if lock_version_allowed and current_version < lock_version:
            logger.warning(
                "The lock file might not be compatible with the current version of Poetry.\n"
                "Upgrade Poetry to ensure the lock file is read properly or, alternatively, "
                "regenerate the lock file with the `poetry lock` command.")
        elif not lock_version_allowed:
            raise RuntimeError(
                "The lock file is not compatible with the current version of Poetry.\n"
                "Upgrade Poetry to be able to read the lock file or, alternatively, "
                "regenerate the lock file with the `poetry lock` command.")

        return lock_data

    def _lock_packages(self, packages: List[Package]) -> list:
        locked = []

        for package in sorted(packages, key=lambda x: x.name):
            spec = self._dump_package(package)

            locked.append(spec)

        return locked

    def _dump_package(self, package: Package) -> dict:
        dependencies = {}
        for dependency in sorted(package.requires, key=lambda d: d.name):
            if dependency.pretty_name not in dependencies:
                dependencies[dependency.pretty_name] = []

            constraint = inline_table()

            if dependency.is_directory() or dependency.is_file():
                constraint["path"] = dependency.path.as_posix()

                if dependency.is_directory() and dependency.develop:
                    constraint["develop"] = True
            elif dependency.is_url():
                constraint["url"] = dependency.url
            elif dependency.is_vcs():
                constraint[dependency.vcs] = dependency.source

                if dependency.branch:
                    constraint["branch"] = dependency.branch
                elif dependency.tag:
                    constraint["tag"] = dependency.tag
                elif dependency.rev:
                    constraint["rev"] = dependency.rev
            else:
                constraint["version"] = str(dependency.pretty_constraint)

            if dependency.extras:
                constraint["extras"] = sorted(dependency.extras)

            if dependency.is_optional():
                constraint["optional"] = True

            if not dependency.marker.is_any():
                constraint["markers"] = str(dependency.marker)

            dependencies[dependency.pretty_name].append(constraint)

        # All the constraints should have the same type,
        # but we want to simplify them if it's possible
        for dependency, constraints in tuple(dependencies.items()):
            if all(
                    len(constraint) == 1 and "version" in constraint
                    for constraint in constraints):
                dependencies[dependency] = [
                    constraint["version"] for constraint in constraints
                ]

        data = dict([
            ("name", package.pretty_name),
            ("version", package.pretty_version),
            ("description", package.description or ""),
            ("category", package.category),
            ("optional", package.optional),
            ("python-versions", package.python_versions),
            ("files", sorted(package.files, key=lambda x: x["file"])),
        ])

        if dependencies:
            data["dependencies"] = table()
            for k, constraints in dependencies.items():
                if len(constraints) == 1:
                    data["dependencies"][k] = constraints[0]
                else:
                    data["dependencies"][k] = array().multiline(True)
                    for constraint in constraints:
                        data["dependencies"][k].append(constraint)

        if package.extras:
            extras = {}
            for name, deps in package.extras.items():
                # TODO: This should use dep.to_pep_508() once this is fixed
                # https://github.com/python-poetry/poetry-core/pull/102
                extras[name] = [
                    dep.base_pep_508_name
                    if not dep.constraint.is_any() else dep.name
                    for dep in deps
                ]

            data["extras"] = extras

        if package.source_url:
            url = package.source_url
            if package.source_type in ["file", "directory"]:
                # The lock file should only store paths relative to the root project
                url = Path(
                    os.path.relpath(
                        Path(url).as_posix(),
                        self._lock.path.parent.as_posix())).as_posix()

            data["source"] = dict()

            if package.source_type:
                data["source"]["type"] = package.source_type

            data["source"]["url"] = url

            if package.source_reference:
                data["source"]["reference"] = package.source_reference

            if package.source_resolved_reference:
                data["source"][
                    "resolved_reference"] = package.source_resolved_reference

            if package.source_type in ["directory", "git"]:
                data["develop"] = package.develop

        return data
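Locker._get_content_hash above hashes only the relevant keys of the pyproject content. A standalone sketch of that calculation with a made-up local_config, mirroring the json/sha256 steps from the class:

import json
from hashlib import sha256

relevant_keys = ["dependencies", "group", "source", "extras"]
local_config = {"dependencies": {"python": "^3.7", "requests": "^2.25"}}  # hypothetical pyproject content

relevant_content = {key: local_config.get(key) for key in relevant_keys}
content_hash = sha256(json.dumps(relevant_content, sort_keys=True).encode()).hexdigest()
print(content_hash)  # the value the Locker compares against metadata["content-hash"]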
Example #23
    def __init__(self):
        self._lock = TOMLFile(Path.cwd().joinpath("poetry.lock"))
        self._locked = True
        self._content_hash = self._get_content_hash()
Example #24
def test_validate():
    complete = TOMLFile(fixtures_dir / "complete.toml")
    content = complete.read()["tool"]["poetry"]

    assert Factory.validate(content) == {"errors": [], "warnings": []}
Example #25
    def handle(self) -> int | None:
        from pathlib import Path

        from poetry.core.pyproject.exceptions import PyProjectException
        from poetry.core.toml.file import TOMLFile

        from poetry.config.file_config_source import FileConfigSource
        from poetry.factory import Factory
        from poetry.locations import CONFIG_DIR

        config = Factory.create_config(self.io)
        config_file = TOMLFile(Path(CONFIG_DIR) / "config.toml")

        try:
            local_config_file = TOMLFile(self.poetry.file.parent /
                                         "poetry.toml")
            if local_config_file.exists():
                config.merge(local_config_file.read())
        except (RuntimeError, PyProjectException):
            local_config_file = TOMLFile(Path.cwd() / "poetry.toml")

        if self.option("local"):
            config.set_config_source(FileConfigSource(local_config_file))

        if not config_file.exists():
            config_file.path.parent.mkdir(parents=True, exist_ok=True)
            config_file.touch(mode=0o0600)

        if self.option("list"):
            self._list_configuration(config.all(), config.raw())

            return 0

        setting_key = self.argument("key")
        if not setting_key:
            return 0

        if self.argument("value") and self.option("unset"):
            raise RuntimeError(
                "You can not combine a setting value with --unset")

        # show the value if no value is provided
        if not self.argument("value") and not self.option("unset"):
            m = re.match(r"^repos?(?:itories)?(?:\.(.+))?",
                         self.argument("key"))
            value: str | dict[str, Any]
            if m:
                if not m.group(1):
                    value = {}
                    if config.get("repositories") is not None:
                        value = config.get("repositories")
                else:
                    repo = config.get(f"repositories.{m.group(1)}")
                    if repo is None:
                        raise ValueError(
                            f"There is no {m.group(1)} repository defined")

                    value = repo

                self.line(str(value))
            else:
                if setting_key not in self.unique_config_values:
                    raise ValueError(f"There is no {setting_key} setting.")

                value = config.get(setting_key)

                if not isinstance(value, str):
                    value = json.dumps(value)

                self.line(value)

            return 0

        values: list[str] = self.argument("value")

        unique_config_values = self.unique_config_values
        if setting_key in unique_config_values:
            if self.option("unset"):
                config.config_source.remove_property(setting_key)
                return None

            return self._handle_single_value(
                config.config_source,
                setting_key,
                unique_config_values[setting_key],
                values,
            )

        # handle repositories
        m = re.match(r"^repos?(?:itories)?(?:\.(.+))?", self.argument("key"))
        if m:
            if not m.group(1):
                raise ValueError(
                    "You cannot remove the [repositories] section")

            if self.option("unset"):
                repo = config.get(f"repositories.{m.group(1)}")
                if repo is None:
                    raise ValueError(
                        f"There is no {m.group(1)} repository defined")

                config.config_source.remove_property(
                    f"repositories.{m.group(1)}")

                return 0

            if len(values) == 1:
                url = values[0]

                config.config_source.add_property(
                    f"repositories.{m.group(1)}.url", url)

                return 0

            raise ValueError(
                "You must pass the url. "
                "Example: poetry config repositories.foo https://bar.com")

        # handle auth
        m = re.match(r"^(http-basic|pypi-token)\.(.+)", self.argument("key"))
        if m:
            from poetry.utils.password_manager import PasswordManager

            password_manager = PasswordManager(config)
            if self.option("unset"):
                if m.group(1) == "http-basic":
                    password_manager.delete_http_password(m.group(2))
                elif m.group(1) == "pypi-token":
                    password_manager.delete_pypi_token(m.group(2))

                return 0

            if m.group(1) == "http-basic":
                if len(values) == 1:
                    username = values[0]
                    # Only username, so we prompt for password
                    password = self.secret("Password:")
                elif len(values) != 2:
                    raise ValueError(
                        "Expected one or two arguments "
                        f"(username, password), got {len(values)}")
                else:
                    username = values[0]
                    password = values[1]

                password_manager.set_http_password(m.group(2), username,
                                                   password)
            elif m.group(1) == "pypi-token":
                if len(values) != 1:
                    raise ValueError(
                        f"Expected only one argument (token), got {len(values)}"
                    )

                token = values[0]

                password_manager.set_pypi_token(m.group(2), token)

            return 0

        # handle certs
        m = re.match(r"(?:certificates)\.([^.]+)\.(cert|client-cert)",
                     self.argument("key"))
        if m:
            if self.option("unset"):
                config.auth_config_source.remove_property(
                    f"certificates.{m.group(1)}.{m.group(2)}")

                return 0

            if len(values) == 1:
                config.auth_config_source.add_property(
                    f"certificates.{m.group(1)}.{m.group(2)}", values[0])
            else:
                raise ValueError("You must pass exactly 1 value")

            return 0

        raise ValueError(f"Setting {self.argument('key')} does not exist")
Example #26
    def remove(self, python: str) -> "Env":
        venv_path = self._poetry.config.get("virtualenvs.path")
        if venv_path is None:
            venv_path = Path(CACHE_DIR) / "virtualenvs"
        else:
            venv_path = Path(venv_path)

        cwd = self._poetry.file.parent
        envs_file = TOMLFile(venv_path / self.ENVS_FILE)
        base_env_name = self.generate_env_name(self._poetry.package.name,
                                               str(cwd))

        if python.startswith(base_env_name):
            venvs = self.list()
            for venv in venvs:
                if venv.path.name == python:
                    # Exact virtualenv name
                    if not envs_file.exists():
                        self.remove_venv(venv.path)

                        return venv

                    venv_minor = ".".join(
                        str(v) for v in venv.version_info[:2])
                    base_env_name = self.generate_env_name(cwd.name, str(cwd))
                    envs = envs_file.read()

                    current_env = envs.get(base_env_name)
                    if not current_env:
                        self.remove_venv(venv.path)

                        return venv

                    if current_env["minor"] == venv_minor:
                        del envs[base_env_name]
                        envs_file.write(envs)

                    self.remove_venv(venv.path)

                    return venv

            raise ValueError(
                '<warning>Environment "{}" does not exist.</warning>'.format(
                    python))

        try:
            python_version = Version.parse(python)
            python = "python{}".format(python_version.major)
            if python_version.precision > 1:
                python += ".{}".format(python_version.minor)
        except ValueError:
            # Executable in PATH or full executable path
            pass

        try:
            python_version = decode(
                subprocess.check_output(
                    list_to_shell_command([
                        python,
                        "-c",
                        "\"import sys; print('.'.join([str(s) for s in sys.version_info[:3]]))\"",
                    ]),
                    shell=True,
                ))
        except CalledProcessError as e:
            raise EnvCommandError(e)

        python_version = Version.parse(python_version.strip())
        minor = "{}.{}".format(python_version.major, python_version.minor)

        name = "{}-py{}".format(base_env_name, minor)
        venv = venv_path / name

        if not venv.exists():
            raise ValueError(
                '<warning>Environment "{}" does not exist.</warning>'.format(
                    name))

        if envs_file.exists():
            envs = envs_file.read()
            current_env = envs.get(base_env_name)
            if current_env is not None:
                current_minor = current_env["minor"]

                if current_minor == minor:
                    del envs[base_env_name]
                    envs_file.write(envs)

        self.remove_venv(venv)

        return VirtualEnv(venv)
Example #27
    def get(self, reload: bool = False) -> Union["VirtualEnv", "SystemEnv"]:
        if self._env is not None and not reload:
            return self._env

        python_minor = ".".join([str(v) for v in sys.version_info[:2]])

        venv_path = self._poetry.config.get("virtualenvs.path")
        if venv_path is None:
            venv_path = Path(CACHE_DIR) / "virtualenvs"
        else:
            venv_path = Path(venv_path)

        cwd = self._poetry.file.parent
        envs_file = TOMLFile(venv_path / self.ENVS_FILE)
        env = None
        base_env_name = self.generate_env_name(self._poetry.package.name,
                                               str(cwd))
        if envs_file.exists():
            envs = envs_file.read()
            env = envs.get(base_env_name)
            if env:
                python_minor = env["minor"]

        # Check if we are inside a virtualenv or not
        # Conda sets CONDA_PREFIX in its envs, see
        # https://github.com/conda/conda/issues/2764
        env_prefix = os.environ.get("VIRTUAL_ENV",
                                    os.environ.get("CONDA_PREFIX"))
        conda_env_name = os.environ.get("CONDA_DEFAULT_ENV")
        # It's probably not a good idea to pollute Conda's global "base" env, since
        # most users have it activated all the time.
        in_venv = env_prefix is not None and conda_env_name != "base"

        if not in_venv or env is not None:
            # Checking if a local virtualenv exists
            if self._poetry.config.get("virtualenvs.in-project") is not False:
                if (cwd / ".venv").exists() and (cwd / ".venv").is_dir():
                    venv = cwd / ".venv"

                    return VirtualEnv(venv)

            create_venv = self._poetry.config.get("virtualenvs.create", True)

            if not create_venv:
                return SystemEnv(Path(sys.prefix))

            venv_path = self._poetry.config.get("virtualenvs.path")
            if venv_path is None:
                venv_path = Path(CACHE_DIR) / "virtualenvs"
            else:
                venv_path = Path(venv_path)

            name = "{}-py{}".format(base_env_name, python_minor.strip())

            venv = venv_path / name

            if not venv.exists():
                return SystemEnv(Path(sys.prefix))

            return VirtualEnv(venv)

        if env_prefix is not None:
            prefix = Path(env_prefix)
            base_prefix = None
        else:
            prefix = Path(sys.prefix)
            base_prefix = self.get_base_prefix()

        return VirtualEnv(prefix, base_prefix)
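
A minimal usage sketch for the get() method above. The EnvManager import path and the EnvManager(poetry) constructor are assumptions based on Poetry 1.x and may differ between versions.

from pathlib import Path

from poetry.factory import Factory
from poetry.utils.env import EnvManager  # assumed location of the class above

# Build the Poetry instance for the project in the current directory and ask
# the manager which environment it would use.
poetry = Factory().create_poetry(Path.cwd())
manager = EnvManager(poetry)

env = manager.get()              # VirtualEnv or SystemEnv, cached on the manager
print(env.path)                  # prefix of the interpreter chosen for this project
env = manager.get(reload=True)   # drop the cached value and resolve again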
Example #28
0
    def activate(self, python: str, io: IO) -> "Env":
        venv_path = self._poetry.config.get("virtualenvs.path")
        if venv_path is None:
            venv_path = Path(CACHE_DIR) / "virtualenvs"
        else:
            venv_path = Path(venv_path)

        cwd = self._poetry.file.parent

        envs_file = TOMLFile(venv_path / self.ENVS_FILE)

        try:
            python_version = Version.parse(python)
            python = "python{}".format(python_version.major)
            if python_version.precision > 1:
                python += ".{}".format(python_version.minor)
        except ValueError:
            # Executable in PATH or full executable path
            pass

        try:
            python_version = decode(
                subprocess.check_output(
                    list_to_shell_command([
                        python,
                        "-c",
                        "\"import sys; print('.'.join([str(s) for s in sys.version_info[:3]]))\"",
                    ]),
                    shell=True,
                ))
        except CalledProcessError as e:
            raise EnvCommandError(e)

        python_version = Version.parse(python_version.strip())
        minor = "{}.{}".format(python_version.major, python_version.minor)
        patch = python_version.text

        create = False
        is_root_venv = self._poetry.config.get("virtualenvs.in-project")
        # If we are required to create the virtual environment in the root folder,
        # create or recreate it if needed
        if is_root_venv:
            create = False
            venv = self._poetry.file.parent / ".venv"
            if venv.exists():
                # We need to check if the patch version is correct
                _venv = VirtualEnv(venv)
                current_patch = ".".join(
                    str(v) for v in _venv.version_info[:3])

                if patch != current_patch:
                    create = True

            self.create_venv(io, executable=python, force=create)

            return self.get(reload=True)

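        # Otherwise use the shared virtualenvs directory and track the chosen
        # environment per project in envs.toml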
        envs = tomlkit.document()
        base_env_name = self.generate_env_name(self._poetry.package.name,
                                               str(cwd))
        if envs_file.exists():
            envs = envs_file.read()
            current_env = envs.get(base_env_name)
            if current_env is not None:
                current_minor = current_env["minor"]
                current_patch = current_env["patch"]

                if current_minor == minor and current_patch != patch:
                    # We need to recreate
                    create = True

        name = "{}-py{}".format(base_env_name, minor)
        venv = venv_path / name

        # Create if needed
        if not venv.exists() or (venv.exists() and create):
            in_venv = os.environ.get("VIRTUAL_ENV") is not None
            if in_venv or not venv.exists():
                create = True

            if venv.exists():
                # We need to check if the patch version is correct
                _venv = VirtualEnv(venv)
                current_patch = ".".join(
                    str(v) for v in _venv.version_info[:3])

                if patch != current_patch:
                    create = True

            self.create_venv(io, executable=python, force=create)

        # Activate
        envs[base_env_name] = {"minor": minor, "patch": patch}
        envs_file.write(envs)

        return self.get(reload=True)
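
A short usage sketch for activate() above. The EnvManager and NullIO import paths are assumptions; older Poetry versions take a clikit-based IO instead of cleo's.

from pathlib import Path

from cleo.io.null_io import NullIO       # assumed; the IO implementation varies by version
from poetry.factory import Factory
from poetry.utils.env import EnvManager  # assumed location of the class above

poetry = Factory().create_poetry(Path.cwd())
manager = EnvManager(poetry)

# "python" can be a bare version ("3.10") or an executable name/path ("python3.10");
# the env is created or recreated as needed and recorded in envs.toml.
env = manager.activate("3.10", NullIO())
print(env.path)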
Example #29
0
    def create_poetry(
            self,
            cwd=None,
            io=None):  # type: (Optional[Path], Optional[IO]) -> Poetry
        if io is None:
            io = NullIO()

        base_poetry = super(Factory, self).create_poetry(cwd)

        locker = Locker(base_poetry.file.parent / "poetry.lock",
                        base_poetry.local_config)

        # Loading global configuration
        config = self.create_config(io)

        # Loading local configuration
        local_config_file = TOMLFile(base_poetry.file.parent / "poetry.toml")
        if local_config_file.exists():
            if io.is_debug():
                io.write_line("Loading configuration file {}".format(
                    local_config_file.path))

            config.merge(local_config_file.read())

        # Load local sources
        repositories = {}
        existing_repositories = config.get("repositories", {})
        for source in base_poetry.pyproject.poetry_config.get("source", []):
            name = source.get("name")
            url = source.get("url")
            if name and url:
                if name not in existing_repositories:
                    repositories[name] = {"url": url}

        config.merge({"repositories": repositories})

        poetry = Poetry(
            base_poetry.file.path,
            base_poetry.local_config,
            base_poetry.package,
            locker,
            config,
        )

        # Configuring sources
        sources = poetry.local_config.get("source", [])
        for source in sources:
            repository = self.create_legacy_repository(source, config)
            is_default = source.get("default", False)
            is_secondary = source.get("secondary", False)
            if io.is_debug():
                message = "Adding repository {} ({})".format(
                    repository.name, repository.url)
                if is_default:
                    message += " and setting it as the default one"
                elif is_secondary:
                    message += " and setting it as secondary"

                io.write_line(message)

            poetry.pool.add_repository(repository,
                                       is_default,
                                       secondary=is_secondary)

        # Always put PyPI last to prefer private repositories
        # but only if we have no other default source
        if not poetry.pool.has_default():
            has_sources = bool(sources)
            poetry.pool.add_repository(PyPiRepository(), not has_sources,
                                       has_sources)
        else:
            if io.is_debug():
                io.write_line("Deactivating the PyPI repository")

        return poetry
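
A brief sketch of exercising the repository wiring above: build the Poetry instance and list the repositories that ended up in its pool. The Factory import path is an assumption.

from pathlib import Path

from poetry.factory import Factory  # assumed import path

poetry = Factory().create_poetry(Path.cwd())

# Sources declared under [[tool.poetry.source]] are added first; PyPI is
# appended last unless one of them was marked as the default.
for repository in poetry.pool.repositories:
    print(repository.name)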
Example #30
0
File: locker.py  Project: timgates42/poetry
    def __init__(self, lock: str | Path, local_config: dict[str, Any]) -> None:
        self._lock = TOMLFile(lock)
        self._local_config = local_config
        self._lock_data: TOMLDocument | None = None
        self._content_hash = self._get_content_hash()
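
A minimal sketch of constructing the Locker above directly. The Locker and PyProjectTOML import paths are assumptions that differ between Poetry versions, and the project directory is expected to contain pyproject.toml.

from pathlib import Path

from poetry.core.pyproject.toml import PyProjectTOML  # assumed import path
from poetry.packages.locker import Locker             # assumed import path

project_dir = Path.cwd()
local_config = PyProjectTOML(project_dir / "pyproject.toml").poetry_config

# Same wiring as the factory examples above: the lock file sits next to
# pyproject.toml and the parsed [tool.poetry] table is passed as local_config.
locker = Locker(project_dir / "poetry.lock", local_config)
print(locker.is_locked())  # whether a usable poetry.lock was found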