Example #1
    def is_excluded(self, filepath):  # type: (Union[str, Path]) -> bool
        exclude_path = Path(filepath)

        while True:
            if exclude_path.as_posix() in self.find_excluded_files():
                return True

            if len(exclude_path.parts) > 1:
                exclude_path = exclude_path.parent
            else:
                break

        return False
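
The walk in Example #1 climbs from the file path to its top-most component, so excluding a directory implicitly excludes every file beneath it. A minimal standalone sketch of the same idea, using a plain set of excluded POSIX paths instead of Poetry's find_excluded_files():

from pathlib import Path


def is_excluded(filepath, excluded):
    # Climb from the file through its ancestors; the file is excluded
    # as soon as any ancestor's POSIX form appears in the excluded set.
    path = Path(filepath)
    while True:
        if path.as_posix() in excluded:
            return True
        if len(path.parts) > 1:
            path = path.parent
        else:
            return False


# Excluding a directory excludes everything below it.
assert is_excluded("pkg/sub/secret.py", {"pkg/sub"})
assert not is_excluded("pkg/other/module.py", {"pkg/sub"})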
Example #2
def test_env_site_simple(tmp_dir, mocker):
    # emulate permission error when creating directory
    mocker.patch("poetry.utils._compat.Path.mkdir", side_effect=OSError())
    site_packages = SitePackages(Path("/non-existent"), fallbacks=[Path(tmp_dir)])
    candidates = site_packages.make_candidates(Path("hello.txt"), writable_only=True)
    hello = Path(tmp_dir) / "hello.txt"

    assert len(candidates) == 1
    assert candidates[0].as_posix() == hello.as_posix()

    content = decode(str(uuid.uuid4()))
    site_packages.write_text(Path("hello.txt"), content, encoding="utf-8")

    assert hello.read_text(encoding="utf-8") == content

    assert not (site_packages.path / "hello.txt").exists()
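
The assertions in Example #2 hold because SitePackages keeps only writable locations when writable_only=True: the mocked mkdir makes the primary "/non-existent" path fail its probe, so the tmp_dir fallback is the sole candidate and the file is written there. A rough sketch of such a writability probe, as an illustration of the idea rather than Poetry's actual helper:

import tempfile


def is_dir_writable(path, create=False):
    # Probe a location: create it if allowed, then try to open a temporary
    # file inside it; any OSError means the location is not usable.
    try:
        if not path.exists():
            if not create:
                return False
            path.mkdir(parents=True, exist_ok=True)
        with tempfile.TemporaryFile(dir=str(path)):
            pass
    except OSError:
        return False
    return True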
Example #3
    def search_for_vcs(self, dependency):  # type: (VCSDependency) -> List[Package]
        """
        Search for the specifications that match the given VCS dependency.

        Basically, we clone the repository in a temporary directory
        and get the information we need by checking out the specified reference.
        """
        if dependency.vcs != "git":
            raise ValueError("Unsupported VCS dependency {}".format(dependency.vcs))

        tmp_dir = Path(mkdtemp(prefix="pypoetry-git-{}".format(dependency.name)))

        try:
            git = Git()
            git.clone(dependency.source, tmp_dir)
            git.checkout(dependency.reference, tmp_dir)
            revision = git.rev_parse(dependency.reference, tmp_dir).strip()

            if dependency.tag or dependency.rev:
                revision = dependency.reference

            directory_dependency = DirectoryDependency(
                dependency.name,
                tmp_dir,
                category=dependency.category,
                optional=dependency.is_optional(),
            )
            for extra in dependency.extras:
                directory_dependency.extras.append(extra)

            package = self.search_for_directory(directory_dependency)[0]

            package.source_type = "git"
            package.source_url = dependency.source
            package.source_reference = revision
        except Exception:
            raise
        finally:
            shutil.rmtree(tmp_dir.as_posix())

        return [package]
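
Git() in Example #3 is a thin wrapper around the git command line; the revision that ends up in package.source_reference is what git rev-parse reports for the checked-out reference. Roughly, as a subprocess sketch (the wrapper's real API is not shown, only the underlying command):

import subprocess


def rev_parse(reference, cwd):
    # Resolve a branch, tag or abbreviated SHA to the full commit hash,
    # mirroring what git.rev_parse(dependency.reference, tmp_dir) returns.
    out = subprocess.check_output(["git", "rev-parse", reference], cwd=str(cwd))
    return out.decode().strip()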
Example #4
    def search_for_vcs(self,
                       dependency):  # type: (VCSDependency) -> List[Package]
        """
        Search for the specifications that match the given VCS dependency.

        Basically, we clone the repository in a temporary directory
        and get the information we need by checking out the specified reference.
        """
        if dependency.vcs != "git":
            raise ValueError("Unsupported VCS dependency {}".format(
                dependency.vcs))

        tmp_dir = Path(
            mkdtemp(prefix="pypoetry-git-{}".format(dependency.name)))

        try:
            git = Git()
            git.clone(dependency.source, tmp_dir)
            git.checkout(dependency.reference, tmp_dir)
            revision = git.rev_parse(dependency.reference, tmp_dir).strip()

            if dependency.tag or dependency.rev:
                revision = dependency.reference

            pyproject = TomlFile(tmp_dir / "pyproject.toml")
            pyproject_content = None
            has_poetry = False
            if pyproject.exists():
                pyproject_content = pyproject.read()
                has_poetry = ("tool" in pyproject_content
                              and "poetry" in pyproject_content["tool"])

            if pyproject_content and has_poetry:
                # If a pyproject.toml file exists
                # We use it to get the information we need
                info = pyproject_content["tool"]["poetry"]

                name = info["name"]
                version = info["version"]
                package = Package(name, version, version)
                package.source_type = dependency.vcs
                package.source_url = dependency.source
                package.source_reference = dependency.reference
                for req_name, req_constraint in info["dependencies"].items():
                    if req_name == "python":
                        package.python_versions = req_constraint
                        continue

                    package.add_dependency(req_name, req_constraint)
            else:
                # We need to use setup.py here
                # to figure the information we need
                # We need to place ourselves in the proper
                # folder for it to work
                venv = Venv.create(self._io)

                current_dir = os.getcwd()
                os.chdir(tmp_dir.as_posix())

                try:
                    venv.run("python", "setup.py", "egg_info")

                    # Sometimes pathlib will fail on recursive
                    # symbolic links, so we need to workaround it
                    # and use the glob module instead.
                    # Note that this does not happen with pathlib2
                    # so it's safe to use it for Python < 3.4.
                    if PY35:
                        egg_info = next(
                            Path(p) for p in glob.glob(
                                os.path.join(str(tmp_dir), "**", "*.egg-info"),
                                recursive=True,
                            ))
                    else:
                        egg_info = next(tmp_dir.glob("**/*.egg-info"))

                    meta = pkginfo.UnpackedSDist(str(egg_info))

                    if meta.requires_dist:
                        reqs = list(meta.requires_dist)
                    else:
                        reqs = []
                        requires = egg_info / "requires.txt"
                        if requires.exists():
                            with requires.open() as f:
                                reqs = parse_requires(f.read())

                    package = Package(meta.name, meta.version)

                    for req in reqs:
                        dep = dependency_from_pep_508(req)
                        if dep.in_extras:
                            for extra in dep.in_extras:
                                if extra not in package.extras:
                                    package.extras[extra] = []

                                package.extras[extra].append(dep)

                        package.requires.append(dep)
                except Exception:
                    raise
                finally:
                    os.chdir(current_dir)

            package.source_type = "git"
            package.source_url = dependency.source
            package.source_reference = revision
        except Exception:
            raise
        finally:
            shutil.rmtree(tmp_dir.as_posix())

        if dependency.name != package.name:
            # For now, the dependency's name must match the actual package's name
            raise RuntimeError(
                "The dependency name for {} does not match the actual package's name: {}"
                .format(dependency.name, package.name))

        if dependency.extras:
            for extra in dependency.extras:
                if extra in package.extras:
                    for dep in package.extras[extra]:
                        dep.activate()

        return [package]
Example #5
    def search_for_vcs(self,
                       dependency):  # type: (VCSDependency) -> List[Package]
        """
        Search for the specifications that match the given VCS dependency.

        Basically, we clone the repository in a temporary directory
        and get the information we need by checking out the specified reference.
        """
        if dependency.vcs != 'git':
            raise ValueError('Unsupported VCS dependency {}'.format(
                dependency.vcs))

        tmp_dir = Path(
            mkdtemp(prefix='pypoetry-git-{}'.format(dependency.name)))

        try:
            git = Git()
            git.clone(dependency.source, tmp_dir)
            git.checkout(dependency.reference, tmp_dir)
            revision = git.rev_parse(dependency.reference, tmp_dir).strip()

            if dependency.tag or dependency.rev:
                revision = dependency.reference

            pyproject = TomlFile(tmp_dir / 'pyproject.toml')
            pyproject_content = None
            has_poetry = False
            if pyproject.exists():
                pyproject_content = pyproject.read(True)
                has_poetry = ('tool' in pyproject_content
                              and 'poetry' in pyproject_content['tool'])

            if pyproject_content and has_poetry:
                # If a pyproject.toml file exists
                # We use it to get the information we need
                info = pyproject_content['tool']['poetry']

                name = info['name']
                version = info['version']
                package = Package(name, version, version)
                package.source_type = dependency.vcs
                package.source_url = dependency.source
                package.source_reference = dependency.reference
                for req_name, req_constraint in info['dependencies'].items():
                    if req_name == 'python':
                        package.python_versions = req_constraint
                        continue

                    package.add_dependency(req_name, req_constraint)
            else:
                # We need to use setup.py here
                # to figure the information we need
                # We need to place ourselves in the proper
                # folder for it to work
                venv = Venv.create(self._io)

                current_dir = os.getcwd()
                os.chdir(tmp_dir.as_posix())

                try:
                    venv.run('python', 'setup.py', 'egg_info')

                    egg_info = list(tmp_dir.glob('*.egg-info'))[0]

                    meta = pkginfo.UnpackedSDist(str(egg_info))

                    if meta.requires_dist:
                        reqs = list(meta.requires_dist)
                    else:
                        reqs = []
                        requires = egg_info / 'requires.txt'
                        if requires.exists():
                            with requires.open() as f:
                                reqs = parse_requires(f.read())

                    package = Package(meta.name, meta.version)

                    for req in reqs:
                        package.requires.append(dependency_from_pep_508(req))
                except Exception:
                    raise
                finally:
                    os.chdir(current_dir)

            package.source_type = 'git'
            package.source_url = dependency.source
            package.source_reference = revision
        except Exception:
            raise
        finally:
            shutil.rmtree(tmp_dir.as_posix())

        return [package]
Example #6
class LegacyRepository(PyPiRepository):
    def __init__(self, name, url):
        if name == 'pypi':
            raise ValueError('The name [pypi] is reserved for repositories')

        self._packages = []
        self._name = name
        self._url = url
        command = get_pip_command()
        opts, _ = command.parse_args([])
        self._session = command._build_session(opts)
        self._repository = PyPIRepository(opts, self._session)
        self._cache_dir = Path(CACHE_DIR) / 'cache' / 'repositories' / name

        self._cache = CacheManager({
            'default': 'releases',
            'serializer': 'json',
            'stores': {
                'releases': {
                    'driver': 'file',
                    'path': str(self._cache_dir)
                },
                'packages': {
                    'driver': 'dict'
                },
                'matches': {
                    'driver': 'dict'
                }
            }
        })

    @property
    def name(self):
        return self._name

    def find_packages(self,
                      name,
                      constraint=None,
                      extras=None,
                      allow_prereleases=False):
        packages = []

        if constraint is not None and not isinstance(constraint,
                                                     BaseConstraint):
            version_parser = VersionParser()
            constraint = version_parser.parse_constraints(constraint)

        key = name
        if constraint:
            key = '{}:{}'.format(key, str(constraint))

        if self._cache.store('matches').has(key):
            versions = self._cache.store('matches').get(key)
        else:
            candidates = [
                str(c.version)
                for c in self._repository.find_all_candidates(name)
            ]

            versions = []
            for version in candidates:
                if version in versions:
                    continue

                if (not constraint
                        or (constraint
                            and constraint.matches(Constraint('=', version)))):
                    versions.append(version)

            self._cache.store('matches').put(key, versions, 5)

        for version in versions:
            packages.append(Package(name, version, extras=extras))

        return packages

    def package(self,
                name,
                version,
                extras=None):  # type: (...) -> poetry.packages.Package
        """
        Retrieve the release information.

        This is a heavy task which takes time.
        We have to download a package to get the dependencies.
        We also need to download every file matching this release
        to get the various hashes.
        
        Note that this will be cached, so subsequent operations
        should be much faster.
        """
        try:
            index = self._packages.index(
                poetry.packages.Package(name, version, version))

            return self._packages[index]
        except ValueError:
            if extras is None:
                extras = []

            release_info = self.get_release_info(name, version)
            package = poetry.packages.Package(name, version, version)
            for req in release_info['requires_dist']:
                try:
                    dependency = dependency_from_pep_508(req)
                except InvalidMarker:
                    # Invalid marker
                    # We strip the markers hoping for the best
                    req = req.split(';')[0]

                    dependency = dependency_from_pep_508(req)

                if dependency.extras:
                    for extra in dependency.extras:
                        if extra not in package.extras:
                            package.extras[extra] = []

                        package.extras[extra].append(dependency)

                if not dependency.is_optional():
                    package.requires.append(dependency)

            # Adding description
            package.description = release_info.get('summary', '')

            # Adding hashes information
            package.hashes = release_info['digests']

            # Activate extra dependencies
            for extra in extras:
                if extra in package.extras:
                    for dep in package.extras[extra]:
                        dep.activate()

                    package.requires += package.extras[extra]

            self._packages.append(package)

            return package

    def get_release_info(self, name, version):  # type: (str, str) -> dict
        """
        Return the release information given a package name and a version.

        The information is returned from the cache if it exists
        or retrieved from the remote server.
        """
        return self._cache.store('releases').remember_forever(
            '{}:{}'.format(name, version),
            lambda: self._get_release_info(name, version))

    def _get_release_info(self, name, version):  # type: (str, str) -> dict
        ireq = InstallRequirement.from_line('{}=={}'.format(name, version))
        resolver = Resolver([ireq],
                            self._repository,
                            cache=DependencyCache(self._cache_dir.as_posix()))
        try:
            requirements = list(resolver._iter_dependencies(ireq))
        except (InstallationError, RequirementParseError):
            # setup.py egg-info error most likely
            # So we assume no dependencies
            requirements = []

        requires = []
        for dep in requirements:
            constraint = str(dep.req.specifier)
            require = dep.name
            if constraint:
                require += ' ({})'.format(constraint)

            requires.append(require)

        try:
            hashes = resolver.resolve_hashes([ireq])[ireq]
        except IndexError:
            # Sometimes pip-tools fails when getting indices
            hashes = []

        hashes = [h.split(':')[1] for h in hashes]

        data = {
            'name': name,
            'version': version,
            'summary': '',
            'requires_dist': requires,
            'digests': hashes
        }

        resolver.repository.freshen_build_caches()

        return data
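
get_release_info in Example #6 defers to the cache: remember_forever serves the release data from the file store when the key is already known, and otherwise runs the callback once and keeps the result. The pattern it relies on looks roughly like this generic sketch (an illustration of the idea, not cachy's implementation):

def remember_forever(store, key, callback):
    # Serve the value from the store if present; otherwise compute it once,
    # persist it without an expiry, and return it.
    if store.has(key):
        return store.get(key)
    value = callback()
    store.forever(key, value)
    return value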
Example #7
    def search_for_vcs(self, dependency):  # type: (VCSDependency) -> List[Package]
        """
        Search for the specifications that match the given VCS dependency.

        Basically, we clone the repository in a temporary directory
        and get the information we need by checking out the specified reference.
        """
        if dependency.vcs != "git":
            raise ValueError("Unsupported VCS dependency {}".format(dependency.vcs))

        tmp_dir = Path(mkdtemp(prefix="pypoetry-git-{}".format(dependency.name)))

        try:
            git = Git()
            git.clone(dependency.source, tmp_dir)
            git.checkout(dependency.reference, tmp_dir)
            revision = git.rev_parse(dependency.reference, tmp_dir).strip()

            if dependency.tag or dependency.rev:
                revision = dependency.reference

            pyproject = TomlFile(tmp_dir / "pyproject.toml")
            pyproject_content = None
            has_poetry = False
            if pyproject.exists():
                pyproject_content = pyproject.read()
                has_poetry = (
                    "tool" in pyproject_content
                    and "poetry" in pyproject_content["tool"]
                )

            if pyproject_content and has_poetry:
                # If a pyproject.toml file exists
                # We use it to get the information we need
                info = pyproject_content["tool"]["poetry"]

                name = info["name"]
                version = info["version"]
                package = Package(name, version, version)
                package.source_type = dependency.vcs
                package.source_url = dependency.source
                package.source_reference = dependency.reference
                for req_name, req_constraint in info["dependencies"].items():
                    if req_name == "python":
                        package.python_versions = req_constraint
                        continue

                    package.add_dependency(req_name, req_constraint)
            else:
                # We need to use setup.py here
                # to figure the information we need
                # We need to place ourselves in the proper
                # folder for it to work
                venv = Venv.create(self._io)

                current_dir = os.getcwd()
                os.chdir(tmp_dir.as_posix())

                try:
                    venv.run("python", "setup.py", "egg_info")

                    # Sometimes pathlib will fail on recursive
                    # symbolic links, so we need to workaround it
                    # and use the glob module instead.
                    # Note that this does not happen with pathlib2
                    # so it's safe to use it for Python < 3.4.
                    if PY35:
                        egg_info = next(
                            Path(p)
                            for p in glob.glob(
                                os.path.join(str(tmp_dir), "**", "*.egg-info"),
                                recursive=True,
                            )
                        )
                    else:
                        egg_info = next(tmp_dir.glob("**/*.egg-info"))

                    meta = pkginfo.UnpackedSDist(str(egg_info))

                    if meta.requires_dist:
                        reqs = list(meta.requires_dist)
                    else:
                        reqs = []
                        requires = egg_info / "requires.txt"
                        if requires.exists():
                            with requires.open() as f:
                                reqs = parse_requires(f.read())

                    package = Package(meta.name, meta.version)

                    for req in reqs:
                        dep = dependency_from_pep_508(req)
                        if dep.in_extras:
                            for extra in dep.in_extras:
                                if extra not in package.extras:
                                    package.extras[extra] = []

                                package.extras[extra].append(dep)

                        package.requires.append(dep)
                except Exception:
                    raise
                finally:
                    os.chdir(current_dir)

            package.source_type = "git"
            package.source_url = dependency.source
            package.source_reference = revision
        except Exception:
            raise
        finally:
            shutil.rmtree(tmp_dir.as_posix())

        if dependency.name != package.name:
            # For now, the dependency's name must match the actual package's name
            raise RuntimeError(
                "The dependency name for {} does not match the actual package's name: {}".format(
                    dependency.name, package.name
                )
            )

        if dependency.extras:
            for extra in dependency.extras:
                if extra in package.extras:
                    for dep in package.extras[extra]:
                        dep.activate()

        return [package]
Example #8
def test_complete_no_vcs():
    # Copy the complete fixtures dir to a temporary directory
    module_path = fixtures_dir / "complete"
    temporary_dir = Path(tempfile.mkdtemp()) / "complete"

    shutil.copytree(module_path.as_posix(), temporary_dir.as_posix())

    builder = CompleteBuilder(
        Poetry.create(temporary_dir), NullEnv(execute=True), NullIO()
    )
    builder.build()

    whl = temporary_dir / "dist" / "my_package-1.2.3-py3-none-any.whl"

    assert whl.exists()

    zip = zipfile.ZipFile(str(whl))

    # Check the zipped file to be sure that included and excluded files are
    # correctly taken account of without vcs
    expected_name_list = [
        "my_package/__init__.py",
        "my_package/data1/test.json",
        "my_package/sub_pkg1/__init__.py",
        "my_package/sub_pkg2/__init__.py",
        "my_package/sub_pkg2/data2/data.json",
        "my_package-1.2.3.dist-info/entry_points.txt",
        "my_package-1.2.3.dist-info/LICENSE",
        "my_package-1.2.3.dist-info/WHEEL",
        "my_package-1.2.3.dist-info/METADATA",
        "my_package-1.2.3.dist-info/RECORD",
    ]

    assert sorted(zip.namelist()) == sorted(expected_name_list)

    try:
        entry_points = zip.read("my_package-1.2.3.dist-info/entry_points.txt")

        assert (
            decode(entry_points.decode())
            == """\
[console_scripts]
extra-script=my_package.extra:main[time]
my-2nd-script=my_package:main2
my-script=my_package:main

"""
        )
        wheel_data = decode(zip.read("my_package-1.2.3.dist-info/WHEEL"))

        assert (
            wheel_data
            == """\
Wheel-Version: 1.0
Generator: poetry {}
Root-Is-Purelib: true
Tag: py3-none-any
""".format(
                __version__
            )
        )
        wheel_data = decode(zip.read("my_package-1.2.3.dist-info/METADATA"))

        assert (
            wheel_data
            == """\
Metadata-Version: 2.1
Name: my-package
Version: 1.2.3
Summary: Some description.
Home-page: https://poetry.eustace.io/
License: MIT
Keywords: packaging,dependency,poetry
Author: Sébastien Eustace
Author-email: [email protected]
Requires-Python: >=3.6,<4.0
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Topic :: Software Development :: Build Tools
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Provides-Extra: time
Requires-Dist: cachy[msgpack] (>=0.2.0,<0.3.0)
Requires-Dist: cleo (>=0.6,<0.7)
Requires-Dist: pendulum (>=1.4,<2.0); extra == "time"
Project-URL: Documentation, https://poetry.eustace.io/docs
Project-URL: Repository, https://github.com/sdispater/poetry
Description-Content-Type: text/x-rst

My Package
==========

"""
        )
    finally:
        zip.close()
Example #9
    def search_for_vcs(self,
                       dependency):  # type: (VCSDependency) -> List[Package]
        """
        Search for the specifications that match the given VCS dependency.

        Basically, we clone the repository in a temporary directory
        and get the information we need by checking out the specified reference.
        """
        if dependency.vcs != 'git':
            raise ValueError('Unsupported VCS dependency {}'.format(
                dependency.vcs))

        tmp_dir = Path(
            mkdtemp(prefix='pypoetry-git-{}'.format(dependency.name)))

        try:
            git = Git()
            git.clone(dependency.source, tmp_dir)
            git.checkout(dependency.reference, tmp_dir)
            revision = git.rev_parse(dependency.reference, tmp_dir).strip()

            if dependency.tag or dependency.rev:
                revision = dependency.reference

            pyproject = TomlFile(tmp_dir / 'pyproject.toml')
            pyproject_content = None
            has_poetry = False
            if pyproject.exists():
                pyproject_content = pyproject.read(True)
                has_poetry = ('tool' in pyproject_content
                              and 'poetry' in pyproject_content['tool'])

            if pyproject_content and has_poetry:
                # If a pyproject.toml file exists
                # We use it to get the information we need
                info = pyproject_content['tool']['poetry']

                name = info['name']
                version = info['version']
                package = Package(name, version, version)
                for req_name, req_constraint in info['dependencies'].items():
                    if req_name == 'python':
                        package.python_versions = req_constraint
                        continue

                    package.add_dependency(req_name, req_constraint)
            else:
                # We need to use setup.py here
                # to figure the information we need
                # We need to place ourselves in the proper
                # folder for it to work
                current_dir = os.getcwd()
                os.chdir(tmp_dir.as_posix())

                try:
                    venv = Venv.create(self._io)
                    output = venv.run('python', 'setup.py', '--name',
                                      '--version')
                    output = output.split('\n')
                    name = output[-3]
                    version = output[-2]
                    package = Package(name, version, version)
                    # Figure out a way to get requirements
                except Exception:
                    raise
                finally:
                    os.chdir(current_dir)

            package.source_type = 'git'
            package.source_url = dependency.source
            package.source_reference = revision
        except Exception:
            raise
        finally:
            shutil.rmtree(tmp_dir.as_posix())

        return [package]
Example #10
    def create(cls, path=None):
        path = path or os.getcwd()
        pyproject_file = Path(path)

        if pyproject_file.name != "pyproject.toml":
            pyproject_file = pyproject_file / "pyproject.toml"

        if not pyproject_file.exists():
            raise RuntimeError(
                "Jetty could not find a pyproject.toml file in {}".format(
                    path))

        local_config = TomlFile(pyproject_file.as_posix()).read()
        tool = local_config.setdefault('tool', {})

        if 'jetty' not in tool and 'poetry' not in tool:
            raise RuntimeError("[tool.jetty] section not found in {}".format(
                pyproject_file.name))

        local_config = merge(tool.get('jetty', {}), tool.get('poetry', {}))

        # Checking validity
        cls.check(local_config)

        # Load package
        name = local_config.get('name', pyproject_file.parent.name)
        version = local_config.get('version', '0')
        package = ProjectPackage(name, version, version)

        if 'dependencies' in local_config:
            for name, constraint in local_config['dependencies'].items():
                if name.lower() == 'python':
                    package.python_versions = constraint
                    continue

                if isinstance(constraint, list):
                    for _constraint in constraint:
                        package.add_dependency(name, _constraint)

                    continue

                package.add_dependency(name, constraint)

        if 'dev-dependencies' in local_config:
            for name, constraint in local_config['dev-dependencies'].items():
                if isinstance(constraint, list):
                    for _constraint in constraint:
                        package.add_dependency(name,
                                               _constraint,
                                               category='dev')

                    continue

                package.add_dependency(name, constraint, category='dev')

        extras = local_config.get("extras", {})
        for extra_name, requirements in extras.items():
            package.extras[extra_name] = []

            # Checking for dependency
            for req in requirements:
                req = Dependency(req, "*")

                for dep in package.requires:
                    if dep.name == req.name:
                        dep.in_extras.append(extra_name)
                        package.extras[extra_name].append(dep)

                        break

        lock = pyproject_file.parent / "poetry.lock"
        locker = Locker(lock, local_config)
        return cls(pyproject_file, local_config, package, locker)
Example #11
    def complete_package(
            self, package):  # type: (DependencyPackage) -> DependencyPackage
        if package.is_root():
            return package

        if package.source_type not in {"directory", "file", "git"}:
            package = DependencyPackage(
                package.dependency,
                self._pool.package(package.name,
                                   package.version.text,
                                   extras=package.requires_extras),
            )

        dependencies = [
            r for r in package.requires
            if self._package.python_constraint.allows_any(r.python_constraint)
        ]

        # Searching for duplicate dependencies
        #
        # If the duplicate dependencies have the same constraint,
        # the requirements will be merged.
        #
        # For instance:
        #   - enum34; python_version=="2.7"
        #   - enum34; python_version=="3.3"
        #
        # will become:
        #   - enum34; python_version=="2.7" or python_version=="3.3"
        #
        # If the duplicate dependencies have different constraints
        # we have to split the dependency graph.
        #
        # An example of this is:
        #   - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
        #   - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
        if not package.is_root():
            duplicates = {}
            for dep in dependencies:
                if dep.name not in duplicates:
                    duplicates[dep.name] = []

                duplicates[dep.name].append(dep)

            dependencies = []
            for dep_name, deps in duplicates.items():
                if len(deps) == 1:
                    dependencies.append(deps[0])
                    continue

                self.debug(
                    "<debug>Duplicate dependencies for {}</debug>".format(
                        dep_name))

                # Regrouping by constraint
                by_constraint = {}
                for dep in deps:
                    if dep.constraint not in by_constraint:
                        by_constraint[dep.constraint] = []

                    by_constraint[dep.constraint].append(dep)

                # We merge by constraint
                for constraint, _deps in by_constraint.items():
                    new_markers = []
                    for dep in _deps:
                        pep_508_dep = dep.to_pep_508(False)
                        if ";" not in pep_508_dep:
                            continue

                        markers = pep_508_dep.split(";")[1].strip()
                        if not markers:
                            # One of the constraint has no markers
                            # so this means we don't actually need to merge
                            new_markers = []
                            break

                        new_markers.append("({})".format(markers))

                    if not new_markers:
                        dependencies += _deps
                        continue

                    dep = _deps[0]
                    new_requirement = "{}; {}".format(
                        dep.to_pep_508(False).split(";")[0],
                        " or ".join(new_markers))
                    new_dep = dependency_from_pep_508(new_requirement)
                    if dep.is_optional() and not dep.is_activated():
                        new_dep.deactivate()
                    else:
                        new_dep.activate()

                    by_constraint[constraint] = [new_dep]

                    continue

                if len(by_constraint) == 1:
                    self.debug(
                        "<debug>Merging requirements for {}</debug>".format(
                            str(deps[0])))
                    dependencies.append(list(by_constraint.values())[0][0])
                    continue

                # We leave dependencies as-is if they have the same
                # python/platform constraints.
                # That way the resolver will pickup the conflict
                # and display a proper error.
                _deps = [value[0] for value in by_constraint.values()]
                seen = set()
                for _dep in _deps:
                    pep_508_dep = _dep.to_pep_508(False)
                    if ";" not in pep_508_dep:
                        _requirements = ""
                    else:
                        _requirements = pep_508_dep.split(";")[1].strip()

                    if _requirements not in seen:
                        seen.add(_requirements)

                if len(_deps) != len(seen):
                    for _dep in _deps:
                        dependencies.append(_dep)

                    continue

                # At this point, we raise an exception that will
                # tell the solver to enter compatibility mode
                # which means it will resolve for subsets
                # Python constraints
                #
                # For instance, if our root package requires Python ~2.7 || ^3.6
                # And we have one dependency that requires Python <3.6
                # and the other Python >=3.6 than the solver will solve
                # dependencies for Python >=2.7,<2.8 || >=3.4,<3.6
                # and Python >=3.6,<4.0
                python_constraints = []
                for constraint, _deps in by_constraint.items():
                    python_constraints.append(_deps[0].python_versions)

                _deps = [str(_dep[0]) for _dep in by_constraint.values()]
                self.debug(
                    "<warning>Different requirements found for {}.</warning>".
                    format(", ".join(_deps[:-1]) + " and " + _deps[-1]))
                raise CompatibilityError(*python_constraints)

        # Modifying dependencies as needed
        for dep in dependencies:
            if not package.dependency.python_constraint.is_any():
                dep.transitive_python_versions = str(
                    dep.python_constraint.intersect(
                        package.dependency.python_constraint))

            if package.dependency.is_directory() and dep.is_directory():
                if dep.package.source_url.startswith(package.source_url):
                    relative = (Path(package.source_url) /
                                dep.package.source_url).relative_to(
                                    package.source_url)
                else:
                    relative = Path(
                        package.source_url) / dep.package.source_url

                dep.package.source_url = relative.as_posix()

        package.requires = dependencies

        return package
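
The duplicate-dependency handling in Example #11 merges requirements that share a constraint by OR-ing their environment markers, exactly as the enum34 comment describes. A standalone sketch of that merge on plain PEP 508 strings (merge_markers is a hypothetical helper, not part of Poetry):

def merge_markers(requirements):
    # Two requirements for the same project and constraint, e.g.
    #   enum34; python_version == "2.7"
    #   enum34; python_version == "3.3"
    # collapse into one requirement whose markers are OR-ed together.
    base = requirements[0].split(";")[0].strip()
    markers = [r.split(";", 1)[1].strip() for r in requirements if ";" in r]
    return "{}; {}".format(base, " or ".join("({})".format(m) for m in markers))


print(merge_markers([
    'enum34; python_version == "2.7"',
    'enum34; python_version == "3.3"',
]))
# enum34; (python_version == "2.7") or (python_version == "3.3")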