Example #1
def test_activate_activates_existing_virtualenv_no_envs_file(
    tmp_dir, manager, poetry, config, mocker
):
    if "VIRTUAL_ENV" in os.environ:
        del os.environ["VIRTUAL_ENV"]

    venv_name = manager.generate_env_name("simple-project", str(poetry.file.parent))

    os.mkdir(os.path.join(tmp_dir, "{}-py3.7".format(venv_name)))

    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    mocker.patch(
        "poetry.utils._compat.subprocess.check_output",
        side_effect=check_output_wrapper(),
    )
    mocker.patch(
        "poetry.utils._compat.subprocess.Popen.communicate",
        side_effect=[("/prefix", None)],
    )
    m = mocker.patch("poetry.utils.env.EnvManager.build_venv", side_effect=build_venv)

    env = manager.activate("python3.7", NullIO())

    m.assert_not_called()

    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    assert env.path == Path(tmp_dir) / "{}-py3.7".format(venv_name)
    assert env.base == Path("/prefix")
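
The envs.toml bookkeeping above reduces to a TOMLFile read/write round trip. A minimal sketch of that round trip, assuming tomlkit's TOMLFile (Poetry's TOMLFile is a thin wrapper around it that adds .path and .exists()) and a made-up environment name:

from pathlib import Path

import tomlkit
from tomlkit.toml_file import TOMLFile

path = Path("envs.toml")                  # hypothetical location
envs_file = TOMLFile(str(path))

doc = tomlkit.document()
doc["simple-project-abc123"] = {"minor": "3.7", "patch": "3.7.1"}
envs_file.write(doc)                      # serialize the document to disk

envs = envs_file.read()                   # returns a dict-like TOMLDocument
assert envs["simple-project-abc123"]["patch"] == "3.7.1"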
Example #2
def test_remove_also_deactivates(tmp_dir, manager, poetry, config, mocker):
    config.merge({"virtualenvs": {"path": str(tmp_dir)}})

    venv_name = manager.generate_env_name("simple-project",
                                          str(poetry.file.parent))
    (Path(tmp_dir) / "{}-py3.7".format(venv_name)).mkdir()
    (Path(tmp_dir) / "{}-py3.6".format(venv_name)).mkdir()

    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(Version.parse("3.6.6")),
    )

    envs_file = TOMLFile(Path(tmp_dir) / "envs.toml")
    doc = tomlkit.document()
    doc[venv_name] = {"minor": "3.6", "patch": "3.6.6"}
    envs_file.write(doc)

    venv = manager.remove("python3.6")

    assert (Path(tmp_dir) / "{}-py3.6".format(venv_name)) == venv.path
    assert not (Path(tmp_dir) / "{}-py3.6".format(venv_name)).exists()

    envs = envs_file.read()
    assert venv_name not in envs
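
Example #2 builds the envs.toml contents as a tomlkit document before writing it; the same tomlkit primitives reappear in the Locker of Example #6 (table, inline_table, and multiline arrays). A short sketch of how each one renders, with illustrative names only:

import tomlkit
from tomlkit import array, document, inline_table, table

doc = document()

files = array().multiline(True)      # one element per line in the output
files.append("a-1.0-py3-none-any.whl")
files.append("a-1.0.tar.gz")
doc["files"] = files

deps = table()                       # rendered as a [dependencies] section
pin = inline_table()                 # rendered inline: { version = "^1.0" }
pin["version"] = "^1.0"
deps["requests"] = pin
doc["dependencies"] = deps

doc["envs"] = {"minor": "3.6", "patch": "3.6.6"}   # plain dicts become tables

print(tomlkit.dumps(doc))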
Example #3
def test_activate_activates_non_existing_virtualenv_no_envs_file(
        mocker, tester, venv_cache, venv_name, venvs_in_cache_config):
    mocker.patch(
        "poetry.utils._compat.subprocess.check_output",
        side_effect=check_output_wrapper(),
    )

    mock_build_env = mocker.patch("poetry.utils.env.EnvManager.build_venv",
                                  side_effect=build_venv)

    tester.execute("3.7")

    venv_py37 = venv_cache / "{}-py3.7".format(venv_name)
    mock_build_env.assert_called_with(venv_py37, executable="python3.7")

    envs_file = TOMLFile(venv_cache / "envs.toml")
    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    expected = """\
Creating virtualenv {} in {}
Using virtualenv: {}
""".format(
        venv_py37.name,
        venv_py37.parent,
        venv_py37,
    )

    assert expected == tester.io.fetch_output()
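
The two side_effect forms used in Examples #1 and #3 are plain unittest.mock behavior; pytest-mock's mocker.patch delegates to it. A minimal sketch:

from unittest.mock import MagicMock

# A callable side_effect is invoked with the call's arguments and its
# return value becomes the mock's -- the role check_output_wrapper() plays.
fake_check_output = MagicMock(side_effect=lambda cmd, *a, **kw: "3.7.1")
assert fake_check_output(["python", "-V"]) == "3.7.1"

# An iterable side_effect yields one return value per call, so
# side_effect=[("/prefix", None)] answers exactly one communicate() call.
fake_communicate = MagicMock(side_effect=[("/prefix", None)])
assert fake_communicate() == ("/prefix", None)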
Example #4
    def create_poetry(
        self,
        cwd: Optional[Path] = None,
        io: Optional[IO] = None,
        disable_plugins: bool = False,
    ) -> Poetry:
        if io is None:
            io = NullIO()

        base_poetry = super(Factory, self).create_poetry(cwd)

        locker = Locker(
            base_poetry.file.parent / "poetry.lock", base_poetry.local_config
        )

        # Loading global configuration
        config = self.create_config(io)

        # Loading local configuration
        local_config_file = TOMLFile(base_poetry.file.parent / "poetry.toml")
        if local_config_file.exists():
            if io.is_debug():
                io.write_line(
                    "Loading configuration file {}".format(local_config_file.path)
                )

            config.merge(local_config_file.read())

        # Load local sources
        repositories = {}
        existing_repositories = config.get("repositories", {})
        for source in base_poetry.pyproject.poetry_config.get("source", []):
            name = source.get("name")
            url = source.get("url")
            if name and url:
                if name not in existing_repositories:
                    repositories[name] = {"url": url}

        config.merge({"repositories": repositories})

        poetry = Poetry(
            base_poetry.file.path,
            base_poetry.local_config,
            base_poetry.package,
            locker,
            config,
        )

        # Configuring sources
        self.configure_sources(
            poetry, poetry.local_config.get("source", []), config, io
        )

        plugin_manager = PluginManager("plugin", disable_plugins=disable_plugins)
        plugin_manager.load_plugins()
        poetry.set_plugin_manager(plugin_manager)
        plugin_manager.activate(poetry, io)

        return poetry
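
The local-configuration step in create_poetry() is just a TOMLFile read merged over the global config. A minimal sketch of that layering, assuming tomlkit's TOMLFile and a hypothetical project directory (Poetry's Config.merge is a deep merge; a shallow dict.update is enough to illustrate):

from pathlib import Path

from tomlkit.toml_file import TOMLFile

project_dir = Path("/path/to/project")            # hypothetical
local_config_path = project_dir / "poetry.toml"

config = {"virtualenvs": {"create": True}}        # stand-in for the global Config
if local_config_path.exists():
    # Local settings take precedence over global ones, mirroring
    # config.merge(local_config_file.read()) in create_poetry().
    config.update(TOMLFile(str(local_config_path)).read())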
Example #5
def test_activate_activates_non_existing_virtualenv_no_envs_file(
    mocker: MockerFixture,
    tester: CommandTester,
    venv_cache: Path,
    venv_name: str,
    venvs_in_cache_config: None,
):
    mocker.patch(
        "subprocess.check_output",
        side_effect=check_output_wrapper(),
    )

    mock_build_env = mocker.patch("poetry.utils.env.EnvManager.build_venv",
                                  side_effect=build_venv)

    tester.execute("3.7")

    venv_py37 = venv_cache / f"{venv_name}-py3.7"
    mock_build_env.assert_called_with(
        venv_py37,
        executable="/usr/bin/python3.7",
        flags={
            "always-copy": False,
            "system-site-packages": False,
            "no-pip": False,
            "no-setuptools": False,
        },
        prompt="simple-project-py3.7",
    )

    envs_file = TOMLFile(venv_cache / "envs.toml")
    assert envs_file.exists()
    envs = envs_file.read()
    assert envs[venv_name]["minor"] == "3.7"
    assert envs[venv_name]["patch"] == "3.7.1"

    expected = f"""\
Creating virtualenv {venv_py37.name} in {venv_py37.parent}
Using virtualenv: {venv_py37}
"""

    assert tester.io.fetch_output() == expected
Example #6
class Locker(object):

    _VERSION = "1.1"

    _relevant_keys = ["dependencies", "dev-dependencies", "source", "extras"]

    def __init__(self, lock, local_config):  # type: (Path, dict) -> None
        self._lock = TOMLFile(lock)
        self._local_config = local_config
        self._lock_data = None
        self._content_hash = self._get_content_hash()

    @property
    def lock(self):  # type: () -> TOMLFile
        return self._lock

    @property
    def lock_data(self):
        if self._lock_data is None:
            self._lock_data = self._get_lock_data()

        return self._lock_data

    def is_locked(self):  # type: () -> bool
        """
        Checks whether the locker has been locked (lockfile found).
        """
        if not self._lock.exists():
            return False

        return "package" in self.lock_data

    def is_fresh(self):  # type: () -> bool
        """
        Checks whether the lock file is still up to date with the current hash.
        """
        lock = self._lock.read()
        metadata = lock.get("metadata", {})

        if "content-hash" in metadata:
            return self._content_hash == lock["metadata"]["content-hash"]

        return False

    def locked_repository(self,
                          with_dev_reqs=False
                          ):  # type: (bool) -> poetry.repositories.Repository
        """
        Searches and returns a repository of locked packages.
        """
        from poetry.factory import Factory

        if not self.is_locked():
            return poetry.repositories.Repository()

        lock_data = self.lock_data
        packages = poetry.repositories.Repository()

        if with_dev_reqs:
            locked_packages = lock_data["package"]
        else:
            locked_packages = [
                p for p in lock_data["package"] if p["category"] == "main"
            ]

        if not locked_packages:
            return packages

        for info in locked_packages:
            source = info.get("source", {})
            source_type = source.get("type")
            url = source.get("url")
            if source_type in ["directory", "file"]:
                url = self._lock.path.parent.joinpath(url).resolve().as_posix()

            package = Package(
                info["name"],
                info["version"],
                info["version"],
                source_type=source_type,
                source_url=url,
                source_reference=source.get("reference"),
                source_resolved_reference=source.get("resolved_reference"),
            )
            package.description = info.get("description", "")
            package.category = info["category"]
            package.optional = info["optional"]
            if "hashes" in lock_data["metadata"]:
                # Old lock format, so create dummy file entries from the hashes
                package.files = [{
                    "name": h,
                    "hash": h
                } for h in lock_data["metadata"]["hashes"][info["name"]]]
            else:
                package.files = lock_data["metadata"]["files"][info["name"]]

            package.python_versions = info["python-versions"]
            extras = info.get("extras", {})
            if extras:
                for name, deps in extras.items():
                    package.extras[name] = []

                    for dep in deps:
                        m = re.match(r"^(.+?)(?:\s+\((.+)\))?$", dep)
                        dep_name = m.group(1)
                        constraint = m.group(2) or "*"

                        package.extras[name].append(
                            Dependency(dep_name, constraint))

            if "marker" in info:
                package.marker = parse_marker(info["marker"])
            else:
                # Compatibility for old locks
                if "requirements" in info:
                    dep = Dependency("foo", "0.0.0")
                    for name, value in info["requirements"].items():
                        if name == "python":
                            dep.python_versions = value
                        elif name == "platform":
                            dep.platform = value

                    split_dep = dep.to_pep_508(False).split(";")
                    if len(split_dep) > 1:
                        package.marker = parse_marker(split_dep[1].strip())

            for dep_name, constraint in info.get("dependencies", {}).items():
                if isinstance(constraint, list):
                    for c in constraint:
                        package.add_dependency(
                            Factory.create_dependency(
                                dep_name, c, root_dir=self._lock.path.parent))

                    continue

                package.add_dependency(
                    Factory.create_dependency(dep_name,
                                              constraint,
                                              root_dir=self._lock.path.parent))

            if "develop" in info:
                package.develop = info["develop"]

            packages.add_package(package)

        return packages

    def get_project_dependencies(
            self,
            project_requires,
            pinned_versions=False,
            with_nested=False):  # type: (List[Dependency], bool, bool) -> Any
        packages = self.locked_repository().packages

        # group package entries by name; this is required because a
        # requirement might use different constraints
        packages_by_name = {}
        for pkg in packages:
            if pkg.name not in packages_by_name:
                packages_by_name[pkg.name] = []
            packages_by_name[pkg.name].append(pkg)

        def __get_locked_package(
            _dependency, ):  # type: (Dependency) -> Optional[Package]
            """
            Internal helper to identify the corresponding locked package using
            the dependency's version constraints.
            """
            for _package in packages_by_name.get(_dependency.name, []):
                if _dependency.constraint.allows(_package.version):
                    return _package
            return None

        project_level_dependencies = set()
        dependencies = []

        for dependency in project_requires:
            dependency = deepcopy(dependency)
            if pinned_versions:
                locked_package = __get_locked_package(dependency)
                if locked_package:
                    dependency.set_constraint(
                        locked_package.to_dependency().constraint)
            project_level_dependencies.add(dependency.name)
            dependencies.append(dependency)

        if not with_nested:
            # return only the project-level dependencies
            return dependencies

        nested_dependencies = list()

        for pkg in packages:  # type: Package
            for requirement in pkg.requires:  # type: Dependency
                if requirement.name in project_level_dependencies:
                    # project level dependencies take precedence
                    continue

                if pinned_versions:
                    requirement.set_constraint(
                        __get_locked_package(
                            requirement).to_dependency().constraint)

                # dependencies use the "extra" marker to indicate that they were
                # activated via the parent package's extras
                marker = requirement.marker.without_extras()
                for project_requirement in project_requires:
                    if (pkg.name == project_requirement.name
                            and project_requirement.constraint.allows(
                                pkg.version)):
                        requirement.marker = marker.intersect(
                            project_requirement.marker)
                        break
                else:
                    # this dependency was not from a project requirement
                    requirement.marker = marker.intersect(pkg.marker)

                if requirement not in nested_dependencies:
                    nested_dependencies.append(requirement)

        return sorted(
            itertools.chain(dependencies, nested_dependencies),
            key=lambda x: x.name.lower(),
        )

    def set_lock_data(self, root, packages):  # type: (...) -> bool
        files = table()
        packages = self._lock_packages(packages)
        # Retrieving hashes
        for package in packages:
            if package["name"] not in files:
                files[package["name"]] = []

            for f in package["files"]:
                file_metadata = inline_table()
                for k, v in sorted(f.items()):
                    file_metadata[k] = v

                files[package["name"]].append(file_metadata)

            if files[package["name"]]:
                files[package["name"]] = item(
                    files[package["name"]]).multiline(True)

            del package["files"]

        lock = document()
        lock["package"] = packages

        if root.extras:
            lock["extras"] = {
                extra: [dep.pretty_name for dep in deps]
                for extra, deps in sorted(root.extras.items())
            }

        lock["metadata"] = OrderedDict([
            ("lock-version", self._VERSION),
            ("python-versions", root.python_versions),
            ("content-hash", self._content_hash),
            ("files", files),
        ])

        if not self.is_locked() or lock != self.lock_data:
            self._write_lock_data(lock)

            return True

        return False

    def _write_lock_data(self, data):
        self.lock.write(data)

        # Checking lock file data consistency
        if data != self.lock.read():
            raise RuntimeError("Inconsistent lock file data.")

        self._lock_data = None

    def _get_content_hash(self):  # type: () -> str
        """
        Returns the sha256 hash of the sorted content of the pyproject file.
        """
        content = self._local_config

        relevant_content = {}
        for key in self._relevant_keys:
            relevant_content[key] = content.get(key)

        content_hash = sha256(
            json.dumps(relevant_content, sort_keys=True).encode()).hexdigest()

        return content_hash

    def _get_lock_data(self):  # type: () -> dict
        if not self._lock.exists():
            raise RuntimeError(
                "No lockfile found. Unable to read locked packages")

        try:
            lock_data = self._lock.read()
        except TOMLKitError as e:
            raise RuntimeError("Unable to read the lock file ({}).".format(e))

        lock_version = Version.parse(lock_data["metadata"].get(
            "lock-version", "1.0"))
        current_version = Version.parse(self._VERSION)
        # We expect the locker to be able to read lock files
        # from the same semantic versioning range
        accepted_versions = parse_constraint("^{}".format(
            Version(current_version.major, 0)))
        lock_version_allowed = accepted_versions.allows(lock_version)
        if lock_version_allowed and current_version < lock_version:
            logger.warning(
                "The lock file might not be compatible with the current version of Poetry.\n"
                "Upgrade Poetry to ensure the lock file is read properly or, alternatively, "
                "regenerate the lock file with the `poetry lock` command.")
        elif not lock_version_allowed:
            raise RuntimeError(
                "The lock file is not compatible with the current version of Poetry.\n"
                "Upgrade Poetry to be able to read the lock file or, alternatively, "
                "regenerate the lock file with the `poetry lock` command.")

        return lock_data

    def _lock_packages(
            self, packages):  # type: (List['poetry.packages.Package']) -> list
        locked = []

        for package in sorted(packages, key=lambda x: x.name):
            spec = self._dump_package(package)

            locked.append(spec)

        return locked

    def _dump_package(self, package):  # type: (Package) -> dict
        dependencies = {}
        for dependency in sorted(package.requires, key=lambda d: d.name):
            if dependency.pretty_name not in dependencies:
                dependencies[dependency.pretty_name] = []

            constraint = inline_table()
            constraint["version"] = str(dependency.pretty_constraint)

            if dependency.extras:
                constraint["extras"] = sorted(dependency.extras)

            if dependency.is_optional():
                constraint["optional"] = True

            if not dependency.marker.is_any():
                constraint["markers"] = str(dependency.marker)

            dependencies[dependency.pretty_name].append(constraint)

        # All the constraints should have the same type,
        # but we want to simplify them if possible
        for dependency, constraints in tuple(dependencies.items()):
            if all(len(constraint) == 1 for constraint in constraints):
                dependencies[dependency] = [
                    constraint["version"] for constraint in constraints
                ]

        data = OrderedDict([
            ("name", package.pretty_name),
            ("version", package.pretty_version),
            ("description", package.description or ""),
            ("category", package.category),
            ("optional", package.optional),
            ("python-versions", package.python_versions),
            ("files", sorted(package.files, key=lambda x: x["file"])),
        ])

        if dependencies:
            data["dependencies"] = table()
            for k, constraints in dependencies.items():
                if len(constraints) == 1:
                    data["dependencies"][k] = constraints[0]
                else:
                    data["dependencies"][k] = array().multiline(True)
                    for constraint in constraints:
                        data["dependencies"][k].append(constraint)

        if package.extras:
            extras = {}
            for name, deps in package.extras.items():
                extras[name] = [
                    str(dep) if not dep.constraint.is_any() else dep.name
                    for dep in deps
                ]

            data["extras"] = extras

        if package.source_url:
            url = package.source_url
            if package.source_type in ["file", "directory"]:
                # The lock file should only store paths relative to the root project
                url = Path(
                    os.path.relpath(
                        Path(url).as_posix(),
                        self._lock.path.parent.as_posix())).as_posix()

            data["source"] = OrderedDict()

            if package.source_type:
                data["source"]["type"] = package.source_type

            data["source"]["url"] = url

            if package.source_reference:
                data["source"]["reference"] = package.source_reference

            if package.source_resolved_reference:
                data["source"][
                    "resolved_reference"] = package.source_resolved_reference

            if package.source_type == "directory":
                data["develop"] = package.develop

        return data
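
The content-hash logic in _get_content_hash() and is_fresh() is small enough to work through by hand: only the four _relevant_keys of the local config feed the hash, so unrelated pyproject.toml edits do not invalidate the lock. A worked sketch with made-up config data:

import json
from hashlib import sha256

local_config = {
    "name": "simple-project",                     # not hashed
    "dependencies": {"python": "^3.7", "requests": "^2.0"},
    "dev-dependencies": {},
}

relevant_content = {
    key: local_config.get(key)
    for key in ("dependencies", "dev-dependencies", "source", "extras")
}
content_hash = sha256(
    json.dumps(relevant_content, sort_keys=True).encode()
).hexdigest()

# is_fresh() compares this value against metadata["content-hash"]
# stored in poetry.lock.
print(content_hash)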