Example #1
0
    def get_requirement_info_by_name(self, req, requirements_by_name=None):
        """Return the requirement line to write to the final file, or ``None``.

        :param req: the pip ``InstallRequirement`` being rendered.
        :param requirements_by_name: mapping of requirement name to a
            namespace exposing a ``requirement`` attribute; only consulted
            when ``use_uniform_requirements`` is enabled.
        :return: the formatted requirement string, optionally annotated with
            the requirement it comes from, or ``None`` when the requirement
            is not part of the resolved set.
        """
        # Avoid the mutable-default-argument pitfall of the previous version.
        if requirements_by_name is None:
            requirements_by_name = {}

        if self.use_uniform_requirements:
            # A link means the requirement points at a repository, not a package.
            if req.link:
                requirement_name = parse_requirement(_get_valid_link_req(req.req)).name
            else:
                requirement_name = parse_requirement(str(req.req)).name

            # If the requirement is not in the dict, it is because it was not
            # needed as a dependency in the original set containing all
            # requirements.
            if requirement_name not in requirements_by_name:
                return None

            # In case we want to show the source for inherited dependencies.
            if self.show_comes_from_info and isinstance(req.comes_from, InstallRequirement):
                return "{}\t\t\t# From: {}".format(requirements_by_name[requirement_name].requirement,
                                                   str(req.comes_from.req))
            return requirements_by_name[requirement_name].requirement

        # If not using uniform versions, we just need to provide the information
        # based on whether it is a repository or a package.
        if self.show_comes_from_info and isinstance(req.comes_from, InstallRequirement):
            return "{}\t\t\t# From: {}".format(format_requirement(req) if not req.link else str(req.req),
                                               str(req.comes_from.req))
        return format_requirement(req) if not req.link else str(req.req)
Example #2
0
        def parse_requires_data(data):
            """Build a list of dependency strings from requires.txt content.

            *data*: the contents of a setuptools-produced requires.txt file.
            """
            result = []
            for raw_line in data.splitlines():
                entry = raw_line.strip()
                # A '[' opens an extras section; everything after it is ignored.
                if entry.startswith('['):
                    logger.warning('Unexpected line: quitting requirement scan: %r',
                                   entry)
                    break
                parsed = parse_requirement(entry)
                if not parsed:
                    logger.warning('Not recognised as a requirement: %r', entry)
                    continue
                if parsed.extras:
                    logger.warning('extra requirements in requires.txt are '
                                   'not supported')
                if parsed.constraints:
                    spec = ', '.join('%s%s' % pair for pair in parsed.constraints)
                    result.append('%s (%s)' % (parsed.name, spec))
                else:
                    result.append(parsed.name)
            return result
Example #3
0
    def matches_requirement(self, req):
        """Return True when this distribution fulfils the requirement.

        :param req: The requirement string to check; any extras it carries
            are discarded before matching.
        :return: True if one of this instance's ``provides`` entries
            satisfies the requirement, else False.
        """
        # Parse so that extras are dropped from what reaches the matcher.
        parsed = parse_requirement(req)
        scheme = get_scheme(self.metadata.scheme)
        try:
            matcher = scheme.matcher(parsed.requirement)
        except UnsupportedVersionError:
            # XXX compat-mode if cannot read the version
            logger.warning('could not read version %r - using name only',
                           req)
            matcher = scheme.matcher(req.split()[0])

        target_name = matcher.key   # case-insensitive

        for provided in self.provides:
            provided_name, provided_version = parse_name_and_version(provided)
            if provided_name != target_name:
                continue
            try:
                # First name match decides the outcome, whatever it is.
                return matcher.match(provided_version)
            except UnsupportedVersionError:
                pass
        return False
Example #4
0
    def set_theme(self, theme):
        """Parse *theme* and store it as the current theme requirement.

        Prefer assigning through the ``.theme`` property.

        :param theme: requirement string describing the theme
        """
        parsed_theme = parse_requirement(theme)
        self._theme = parsed_theme
Example #5
0
    def set_theme(self, theme):
        """Store the parsed theme requirement on this instance.

        Callers should normally go through the ``.theme`` property instead.

        :param theme: requirement string for the theme
        """
        requirement = parse_requirement(theme)
        self._theme = requirement
Example #6
0
def _get_valid_link_req(req):
    """Format a repository-based dependency, which can be dirty in case of a
    collision between repo-based and package-based declarations (for similar
    packages)."""
    # Split on the first '@' only: name part in front, link part behind.
    name_part, _, link_part = str(req).partition("@")
    return "{}@ {}".format(parse_requirement(name_part).name, link_part)
Example #7
0
 def __add_requirements(self, reqs, extra=None):
     """Register *reqs* under *extra* in ``self._requirements``.

     :param reqs: iterable of requirement strings understood by
         ``parse_requirement``.
     :param extra: name of the extra these requirements belong to, or
         ``None`` for the base requirement set.
     :raises ValueError: if the same requirement name is declared twice
         for the given extra.
     """
     # Only create the bucket when there is something to store, keeping the
     # mapping untouched for empty input (as before, but using truthiness
     # instead of len()).
     if reqs:
         self._requirements.setdefault(extra, {})
     for raw_req in reqs:
         req = parse_requirement(raw_req)
         if req.name in self._requirements[extra]:
             raise ValueError("Duplicate requirement for {}.".format(req.name))
         self._requirements[extra][req.name] = req
Example #8
0
    def check_duplicate_dependencies_nonuniform(self, extra, resolver):
        """Verify that *extra* declares no duplicate dependencies; used only
        when ``use_uniform_requirements`` is False."""
        resolved = {}
        for resolved_req in resolver.resolve(max_rounds=10):
            key = parse_requirement(str(resolved_req.req)).name
            resolved[key] = SimpleNamespace(
                requirement=format_requirement(resolved_req).strip().replace(" ", ""),
                url=resolved_req.link,
            )

        self._check_duplicate_dependencies_by_extra(extra, resolved)
Example #9
0
    def locate(self, requirement, prereleases=False):
        """
        Find the most recent distribution which matches the given
        requirement.

        :param requirement: A requirement of the form 'foo (1.0)' or perhaps
                            'foo (>= 1.0, < 2.0, != 1.3)'
        :param prereleases: If ``True``, allow pre-release versions
                            to be located. Otherwise, pre-release versions
                            are not returned.
        :return: A :class:`Distribution` instance, or ``None`` if no such
                 distribution could be located.
        :raises DistlibException: if *requirement* cannot be parsed.
        """
        result = None
        r = parse_requirement(requirement)
        if r is None:
            raise DistlibException('Not a valid requirement: %r' % requirement)
        scheme = get_scheme(self.scheme)
        # NOTE(review): the matcher is published on the instance for the
        # duration of the search and reset to None before returning —
        # presumably so other methods can consult it mid-search; confirm
        # before relying on that attribute elsewhere.
        self.matcher = matcher = scheme.matcher(r.requirement)
        logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
        versions = self.get_project(r.name)
        if versions:
            # sometimes, versions are invalid
            slist = []
            vcls = matcher.version_class
            for k in versions:
                try:
                    if not matcher.match(k):
                        logger.debug('%s did not match %r', matcher, k)
                    else:
                        # Keep the version unless it is a pre-release the
                        # caller did not ask for.
                        if prereleases or not vcls(k).is_prerelease:
                            slist.append(k)
                        else:
                            logger.debug(
                                'skipping pre-release '
                                'version %s of %s', k, matcher.name)
                except Exception:
                    # Invalid/unparseable version strings are logged and
                    # skipped rather than aborting the whole search.
                    logger.warning('error matching %s with %r', matcher, k)
                    pass  # slist.append(k)
            if len(slist) > 1:
                slist = sorted(slist, key=scheme.key)
            if slist:
                logger.debug('sorted list: %s', slist)
                # scheme.key orders versions so the most recent comes last.
                result = versions[slist[-1]]
        if result and r.extras:
            result.extras = r.extras
        self.matcher = None
        return result
Example #10
0
    def locate(self, requirement, prereleases=False):
        """
        Find the most recent distribution which matches the given
        requirement.

        :param requirement: A requirement of the form 'foo (1.0)' or perhaps
                            'foo (>= 1.0, < 2.0, != 1.3)'
        :param prereleases: If ``True``, allow pre-release versions
                            to be located. Otherwise, pre-release versions
                            are not returned.
        :return: A :class:`Distribution` instance, or ``None`` if no such
                 distribution could be located.
        """
        parsed = parse_requirement(requirement)
        if parsed is None:
            raise DistlibException('Not a valid requirement: %r' % requirement)
        scheme = get_scheme(self.scheme)
        self.matcher = matcher = scheme.matcher(parsed.requirement)
        logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
        found = None
        versions = self.get_project(parsed.name)
        if versions:
            # sometimes, versions are invalid
            candidates = []
            version_class = matcher.version_class
            for candidate in versions:
                try:
                    if not matcher.match(candidate):
                        logger.debug('%s did not match %r', matcher, candidate)
                    elif prereleases or not version_class(candidate).is_prerelease:
                        candidates.append(candidate)
                    else:
                        logger.debug('skipping pre-release '
                                     'version %s of %s', candidate, matcher.name)
                except Exception:
                    logger.warning('error matching %s with %r', matcher, candidate)
            if len(candidates) > 1:
                candidates = sorted(candidates, key=scheme.key)
            if candidates:
                logger.debug('sorted list: %s', candidates)
                found = versions[candidates[-1]]
        if found and parsed.extras:
            found.extras = parsed.extras
        self.matcher = None
        return found
Example #11
0
    def can_be_updated(cls, dist, latest_version):
        """Determine whether package can be updated or not."""
        scheme = get_scheme('default')
        package = dist.project_name
        for dependant in cls.get_dependants(package):
            for spec in cls.get_requirement(package, dependant.requires()):
                parsed = parse_requirement(spec)
                # Ignore error if version in requirement spec can't be parsed
                try:
                    matcher = scheme.matcher(parsed.requirement)
                except UnsupportedVersionError:
                    continue
                # Any dependant whose constraint rejects the new version
                # blocks the update.
                if not matcher.match(str(latest_version)):
                    return False

        return True
Example #12
0
 def set_theme(self, theme):
     """Parse *theme* and store it as the active theme requirement."""
     self._theme = parse_requirement(theme)
Example #13
0
        def get_python_rec(anchor_file):
            """Build a PrefixRecord for the Python distribution anchored at
            *anchor_file*, or return None when no pydist metadata exists."""
            package_type, sp_reference, pydist = get_pydist(anchor_file)
            if pydist is None:
                return None
            # x.provides  =>  [u'skdata (0.0.4)']
            # x.run_requires  =>  set([u'joblib', u'scikit-learn', u'lockfile', u'numpy', u'nose (>=1.0)'])  # NOQA
            # >>> list(x.list_installed_files())  =>  [(u'skdata/__init__.py', u'sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU', u'0'), (u'skdata/base.py', u'sha256=04MW02dky5T4nZb6Q0M351aRbAwLxd8voCK3nrAU-g0', u'5019'), (u'skdata/brodatz.py', u'sha256=NIPWLawJ59Fr037r0oT_gHe46WCo3UivuQ-cwxRU3ow', u'8492'), (u'skdata/caltech.py', u'sha256=cIfyMMRYggZ3Jkgc15tsYi_ZsZ7NpRqWh7mZ8bl6Fo0', u'8047'), (u'skdata/data_home.py', u'sha256=o5ChOI4v3Jd16JM3qWZlhrs5q-g_0yKa5-Oq44HC_K4', u'1297'), (u'skdata/diabetes.py', u'sha256=ny5Ihpc_eiIRYgzFn3Lm81fV0SZ1nyZQnqEmwb2PrS0', u'995'), (u'skdata/digits.py', u'sha256=DipeWAb3APpjXfmKmSumkfEFzuBW8XJ0  # NOQA

            # TODO: normalize names against '.', '-', '_'
            # TODO: ensure that this dist is *actually* the dist that matches conda-meta

            # NOTE(review): egg-info *files* get no paths data — presumably
            # there is nothing to enumerate for them; confirm.
            if package_type == PackageType.SHADOW_PYTHON_EGG_INFO_FILE:
                paths_data = None
            elif package_type == PackageType.SHADOW_PYTHON_DIST_INFO:
                # dist-info entries carry optional 'sha256=<hash>' and a size.
                _paths_data = []
                for _path, _hash, _size in pydist.list_installed_files():
                    if _hash:
                        assert _hash.startswith('sha256='), (anchor_file, _hash)
                        sha256 = _hash[7:]
                    else:
                        sha256 = None
                    _size = int(_size) if _size else None
                    _paths_data.append(PathDataV1(
                        _path=_path,
                        path_type=PathType.hardlink,
                        sha256=sha256,
                        size_in_bytes=_size
                    ))
                paths_data = PathsData(paths_version=1, paths=_paths_data)
            elif package_type == PackageType.SHADOW_PYTHON_EGG_INFO_DIR:
                # Egg-info dirs: record paths only, hash/size are discarded.
                _paths_data = []
                # TODO: Don't use list_installed_files() here. Read SOURCES.txt directly.
                for _path, _, _ in pydist.list_installed_files():
                    _paths_data.append(PathData(
                        _path=_path,
                        path_type=PathType.hardlink,
                    ))
                paths_data = PathsData(paths_version=1, paths=_paths_data)
            else:
                raise NotImplementedError()

            # TODO: need to add entry points, "exports," and other files that might not be in RECORD  # NOQA

            # NOTE(review): parse_requirement may return None for an
            # unparseable entry, which would make req.name raise — confirm
            # run_requires entries are always parseable.
            depends = tuple(
                req.name for req in
                # vars(req) => {'source': u'nose (>=1.0)', 'requirement': u'nose (>= 1.0)', 'extras': None, 'name': u'nose', 'url': None, 'constraints': [(u'>=', u'1.0')]}  # NOQA
                (parse_requirement(r) for r in pydist.run_requires)
            )
            # TODO: need to add python (with version?) to deps

            python_rec = PrefixRecord(
                package_type=package_type,
                namespace='python',
                name=pydist.name.lower(),
                version=pydist.version,
                channel=Channel('pypi'),
                subdir='pypi',
                fn=sp_reference,
                build='pypi_0',
                build_number=0,
                paths_data=paths_data,
                depends=depends,
            )
            return python_rec
Example #14
0
        def get_python_rec(anchor_file):
            """Build a PrefixRecord for the Python distribution anchored at
            *anchor_file*, or return None when no pydist metadata exists."""
            package_type, sp_reference, pydist = get_pydist(anchor_file)
            if pydist is None:
                return None
            # x.provides  =>  [u'skdata (0.0.4)']
            # x.run_requires  =>  set([u'joblib', u'scikit-learn', u'lockfile', u'numpy', u'nose (>=1.0)'])  # NOQA
            # >>> list(x.list_installed_files())  =>  [(u'skdata/__init__.py', u'sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU', u'0'), (u'skdata/base.py', u'sha256=04MW02dky5T4nZb6Q0M351aRbAwLxd8voCK3nrAU-g0', u'5019'), (u'skdata/brodatz.py', u'sha256=NIPWLawJ59Fr037r0oT_gHe46WCo3UivuQ-cwxRU3ow', u'8492'), (u'skdata/caltech.py', u'sha256=cIfyMMRYggZ3Jkgc15tsYi_ZsZ7NpRqWh7mZ8bl6Fo0', u'8047'), (u'skdata/data_home.py', u'sha256=o5ChOI4v3Jd16JM3qWZlhrs5q-g_0yKa5-Oq44HC_K4', u'1297'), (u'skdata/diabetes.py', u'sha256=ny5Ihpc_eiIRYgzFn3Lm81fV0SZ1nyZQnqEmwb2PrS0', u'995'), (u'skdata/digits.py', u'sha256=DipeWAb3APpjXfmKmSumkfEFzuBW8XJ0  # NOQA

            # TODO: normalize names against '.', '-', '_'
            # TODO: ensure that this dist is *actually* the dist that matches conda-meta

            # NOTE(review): egg-info *files* get no paths data — presumably
            # there is nothing to enumerate for them; confirm.
            if package_type == PackageType.SHADOW_PYTHON_EGG_INFO_FILE:
                paths_data = None
            elif package_type == PackageType.SHADOW_PYTHON_DIST_INFO:
                # dist-info entries carry optional 'sha256=<hash>' and a size.
                _paths_data = []
                for _path, _hash, _size in pydist.list_installed_files():
                    if _hash:
                        assert _hash.startswith('sha256='), (anchor_file, _hash)
                        sha256 = _hash[7:]
                    else:
                        sha256 = None
                    _size = int(_size) if _size else None
                    _paths_data.append(PathDataV1(
                        _path=_path,
                        path_type=PathType.hardlink,
                        sha256=sha256,
                        size_in_bytes=_size
                    ))
                paths_data = PathsData(paths_version=1, paths=_paths_data)
            elif package_type == PackageType.SHADOW_PYTHON_EGG_INFO_DIR:
                # Egg-info dirs: record paths only, hash/size are discarded.
                _paths_data = []
                # TODO: Don't use list_installed_files() here. Read SOURCES.txt directly.
                for _path, _, _ in pydist.list_installed_files():
                    _paths_data.append(PathData(
                        _path=_path,
                        path_type=PathType.hardlink,
                    ))
                paths_data = PathsData(paths_version=1, paths=_paths_data)
            else:
                raise NotImplementedError()

            # TODO: need to add entry points, "exports," and other files that might not be in RECORD  # NOQA

            # NOTE(review): parse_requirement may return None for an
            # unparseable entry, which would make req.name raise — confirm
            # run_requires entries are always parseable.
            depends = tuple(
                req.name for req in
                # vars(req) => {'source': u'nose (>=1.0)', 'requirement': u'nose (>= 1.0)', 'extras': None, 'name': u'nose', 'url': None, 'constraints': [(u'>=', u'1.0')]}  # NOQA
                (parse_requirement(r) for r in pydist.run_requires)
            )
            # TODO: need to add python (with version?) to deps

            python_rec = PrefixRecord(
                package_type=package_type,
                name=pydist.name.lower(),
                version=pydist.version,
                channel=Channel('pypi'),
                subdir='pypi',
                fn=sp_reference,
                build='pypi_0',
                build_number=0,
                paths_data=paths_data,
                depends=depends,
            )
            return python_rec
Example #15
0
    def on_end(self, event):
        """Render the ``requirements*.txt`` files and ``setup.py``.

        Resolves the project's dependencies (per extra) with pip-tools,
        optionally unifying versions across all extras first, writes one
        requirements file per extra, and finally renders ``setup.py``.

        :param event: build event exposing ``config["python"]``.
        """

        # Our config object
        python_config = event.config["python"]

        # Pip / PyPI
        repository = PyPIRepository([], cache_dir=CACHE_DIR)

        def _constraints_for(extras):
            # Write the requirement lines for the given extras to a temporary
            # file and parse them into pip-tools constraints. The file is
            # always closed and removed afterwards (the previous version
            # leaked one undeleted temp file per call).
            tmpfile = tempfile.NamedTemporaryFile(mode="wt", delete=False)
            try:
                for extra in extras:
                    tmpfile.write("\n".join(python_config.get_requirements(extra=extra)) + "\n")
                    tmpfile.flush()
                tmpfile.close()
                return list(
                    parse_requirements(
                        tmpfile.name, finder=repository.finder, session=repository.session, options=repository.options
                    )
                )
            finally:
                tmpfile.close()
                os.unlink(tmpfile.name)

        def _make_resolver(constraints):
            # One resolver per constraint set; evaluates all transitive deps.
            return Resolver(
                constraints,
                repository,
                cache=DependencyCache(CACHE_DIR),
                prereleases=False,
                clear_caches=False,
                allow_unsafe=False,
            )

        # We just need to construct this structure if use_uniform_requirements == True
        requirements_by_name = {}

        if python_config.use_uniform_requirements:
            # This resolver is able to evaluate ALL the dependencies along the extras
            resolver = _make_resolver(
                _constraints_for(itertools.chain((None,), python_config.get_extras()))
            )

            for req in resolver.resolve(max_rounds=10):
                requirements_by_name[parse_requirement(str(req.req)).name] = SimpleNamespace(
                    requirement=format_requirement(req).strip().replace(" ", ""),
                    url=req.link
                )

            python_config.check_duplicate_dependencies_uniform(requirements_by_name)

        # Now it iterates along the versions in extras and looks for the requirements and its dependencies, using the
        # structure created above to select the unified versions (unless the flag indicates otherwise).
        for extra in itertools.chain((None,), python_config.get_extras()):
            requirements_file = "requirements{}.txt".format("-" + extra if extra else "")

            if python_config.override_requirements or not os.path.exists(requirements_file):
                resolver = _make_resolver(_constraints_for((extra,)))

                if not python_config.use_uniform_requirements:
                    python_config.check_duplicate_dependencies_nonuniform(extra, resolver)

                requirements_list = []
                for req in resolver.resolve(max_rounds=10):
                    # The project itself is written as "-e ." below, not as a pin.
                    if req.name != python_config.get("name"):
                        requirement = python_config.get_requirement_info_by_name(req, requirements_by_name)
                        if requirement:
                            requirements_list.append(requirement)

                self.render_file_inline(
                    requirements_file,
                    "\n".join(
                        (
                            "-e .{}".format("[" + extra + "]" if extra else ""),
                            *(("-r requirements.txt",) if extra else ()),
                            *python_config.get_vendors(extra=extra),
                            *sorted(requirements_list),
                        )
                    ),
                    override=python_config.override_requirements,
                )

        # Updates setup file
        setup = python_config.get_setup()

        # URLs may contain {name}/{user}/{version} placeholders; {version} is
        # deliberately passed through unchanged so it survives until setup.py
        # is rendered.
        context = {
            "url": setup.pop("url", ""),
            "download_url": setup.pop("download_url", ""),
        }

        for k in context:
            context[k] = context[k].format(name=setup["name"], user=getuser(), version="{version}")

        context.update(
            {
                "entry_points": setup.pop("entry_points", {}),
                "extras_require": python_config.get("extras_require"),
                "install_requires": python_config.get("install_requires"),
                "python": python_config,
                "setup": setup,
                "banner": get_override_warning_banner(),
            }
        )

        # Render (with overwriting) the allmighty setup.py
        self.render_file("setup.py", "python/setup.py.j2", context, override=True)