Code example #1
    def get_candidates(self, req):
        project_name = None
        if req is not None:
            project_name = utils.normalize_project_name(req.name)
        results = []
        for candidate in self.links:
            if (req is None or utils.normalize_project_name(candidate.name)
                    == project_name):
                results.append(candidate)

        return results
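
Every lookup in these examples goes through utils.normalize_project_name. As a point of reference, a minimal PEP 503-style normalizer looks like the sketch below; this is an assumption for illustration, and req-compile's actual helper may differ in detail.

import re

def normalize_project_name(name):
    # PEP 503: names compare case-insensitively, and runs of "-", "_"
    # and "." count as a single separator
    return re.sub(r"[-_.]+", "-", name).lower()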
Code example #2
    def build_constraints(self):
        # type: () -> pkg_resources.Requirement
        result = None

        for rdep_node in self.reverse_deps:
            assert (
                rdep_node.metadata is not None
            ), "Reverse dependency should already have a solution"
            all_reqs = set(rdep_node.metadata.requires())
            for extra in rdep_node.extras:
                all_reqs |= set(rdep_node.metadata.requires(extra=extra))
            for req in all_reqs:
                if normalize_project_name(req.project_name) == self.key:
                    result = merge_requirements(result, req)

        if result is None:
            if self.metadata is None:
                result = parse_requirement(self.key)
            else:
                result = parse_requirement(self.metadata.name)
            assert result is not None

            if self.extras:
                result.extras = self.extras
                # Reparse to create a correct hash
                result = parse_requirement(str(result))
                assert result is not None
        return result
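
build_constraints folds every reverse dependency's requirement on this node into a single requirement via merge_requirements. That helper is not shown in the excerpt; a hedged sketch of the core idea using the packaging library follows (the real implementation also has to handle markers and other edge cases).

from packaging.requirements import Requirement

def merge_requirements(existing, new):
    # Sketch: intersect the specifier sets of two requirements
    # on the same project and union their extras.
    if existing is None:
        return new
    if new is None:
        return existing
    merged = Requirement(str(existing))
    merged.specifier = existing.specifier & new.specifier
    merged.extras = set(existing.extras) | set(new.extras)
    return merged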
Code example #3
def _build_constraints(root_node):
    # type: (DependencyNode) -> Iterable[str]
    constraints = []  # type: List[str]
    for node in root_node.reverse_deps:
        assert (
            node.metadata is not None
        ), "Reverse dependency should already have a solution"
        all_reqs = set(node.metadata.requires())
        for extra in node.extras:
            all_reqs |= set(node.metadata.requires(extra=extra))
        for req in all_reqs:
            if normalize_project_name(req.project_name) == root_node.key:
                _process_constraint_req(req, node, constraints)
    return constraints
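
_process_constraint_req is not part of the excerpt. A plausible sketch, purely an assumption about its behavior, is that it renders each contributing requirement together with the node that imposed it, matching the Iterable[str] return type above.

def _process_constraint_req(req, node, constraints):
    # assumption: record the constraint plus which reverse dependency added it
    constraints.append("{}  # via {}".format(req, node.key))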
Code example #4
File: source.py Project: sputt/req-compile
    def _add_distribution(self, source_dir, result):
        # type: (str, RequirementContainer) -> None
        if result.version is None:
            self.logger.debug("Source dir %s did not provide a version", source_dir)
            result.version = parse_version("0")
        candidate = req_compile.repos.repository.Candidate(
            result.name,
            source_dir,
            result.version,
            None,
            None,
            "any",
            None,
            req_compile.repos.repository.DistributionType.SOURCE,
        )
        candidate.preparsed = result
        self.distributions[utils.normalize_project_name(result.name)].append(candidate)
Code example #5
File: source.py Project: sputt/req-compile
    def get_candidates(self, req):
        if req is None:
            return itertools.chain(*self.distributions.values())

        project_name = utils.normalize_project_name(req.name)
        return self.distributions.get(project_name, [])
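
Examples #4 and #5 both assume self.distributions maps a normalized project name to a list of candidates. A minimal stand-alone illustration of that shape (defaultdict(list) is an assumption consistent with the append in example #4):

import itertools
from collections import defaultdict

distributions = defaultdict(list)  # normalized name -> [candidate, ...]
distributions["flask"].append("<candidate for Flask 1.1.2>")

# req is None: chain every candidate from every project, as in example #5
all_candidates = list(itertools.chain(*distributions.values()))
# otherwise: only the candidates indexed under the normalized name
flask_candidates = distributions.get("flask", [])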
Code example #6
File: compile.py Project: sputt/req-compile
def compile_roots(
    node,  # type: DependencyNode
    source,  # type: Optional[DependencyNode]
    repo,  # type: BaseRepository
    dists,  # type: DistributionCollection
    options,  # type: CompileOptions
    depth=1,  # type: int
    max_downgrade=MAX_DOWNGRADE,  # type: int
    _path=None,
):  # pylint: disable=too-many-statements,too-many-locals,too-many-branches
    # type: (...) -> None
    """
    Args:
        node: The node to compile
        source: The source node of this provided node. This is used to build the graph
        repo: The repository to provide candidates.
        dists: The solution that is being built incrementally
        options: Static options for the compile (including extras)
        depth: Depth the compilation has descended into
        max_downgrade: The maximum number of version downgrades that will be allowed for conflicts
        _path: The path back to root - all nodes along the way
    """
    if _path is None:
        _path = set()

    logger = LOG
    logger.debug("Processing node %s", node)
    if node.key in dists and dists[node.key] is not node:
        logger.debug("No need to process this node, it has been removed")
    elif node.metadata is not None:
        if not node.complete:
            if depth > MAX_COMPILE_DEPTH:
                raise ValueError("Recursion too deep")
            try:
                for req in sorted(node.dependencies,
                                  key=lambda dep: dep.key):
                    if req in _path:
                        if options.allow_circular_dependencies:
                            logger.error(
                                "Skipping node %s because it includes this node",
                                req,
                            )
                        else:
                            raise ValueError(
                                "Circular dependency: {node} -> {req} -> {node}"
                                .format(
                                    node=node,
                                    req=req,
                                ))
                    else:
                        compile_roots(
                            req,
                            node,
                            repo,
                            dists,
                            options,
                            depth=depth + 1,
                            max_downgrade=max_downgrade,
                            _path=_path | {node},
                        )
                node.complete = True

            except NoCandidateException:
                if max_downgrade == 0:
                    raise
                compile_roots(
                    node,
                    source,
                    repo,
                    dists,
                    options,
                    depth=depth,
                    max_downgrade=0,
                    _path=_path,
                )
        else:
            logger.info("Reusing dist %s %s", node.metadata.name,
                        node.metadata.version)
    else:
        spec_req = node.build_constraints()

        if options.pinned_requirements:
            pin = options.pinned_requirements.get(
                normalize_project_name(spec_req.project_name), spec_req)
            spec_req = merge_requirements(spec_req, pin)

        try:
            metadata, cached = repo.get_candidate(spec_req,
                                                  max_downgrade=max_downgrade)
            logger.debug(
                "Acquired candidate %s %s [%s] (%s)",
                metadata,
                spec_req,
                metadata.origin,
                "cached" if cached else "download",
            )
            reason = None
            if source is not None:
                if node in source.dependencies:
                    reason = source.dependencies[node]
                    if options.extras and isinstance(metadata.origin,
                                                     SourceRepository):
                        reason = merge_requirements(
                            reason,
                            parse_requirement(reason.project_name + "[" +
                                              ",".join(options.extras) + "]"),
                        )

            nodes_to_recurse = dists.add_dist(metadata, source, reason)
            for recurse_node in sorted(nodes_to_recurse):
                compile_roots(
                    recurse_node,
                    source,
                    repo,
                    dists,
                    options,
                    depth=depth + 1,
                    max_downgrade=max_downgrade,
                    _path=_path,
                )
        except NoCandidateException:
            if max_downgrade == 0:
                raise

            exc_info = sys.exc_info()

            nodes = sorted(node.reverse_deps)

            violate_score = defaultdict(int)  # type: Dict[DependencyNode, int]
            for idx, revnode in enumerate(nodes):
                for next_node in nodes[idx + 1:]:
                    if not is_possible(
                            merge_requirements(revnode.dependencies[node],
                                               next_node.dependencies[node])):
                        logger.error(
                            "Requirement %s was not possible. Violating pair: %s %s",
                            node.build_constraints(),
                            revnode,
                            next_node,
                        )
                        violate_score[revnode] += 1
                        violate_score[next_node] += 1

            try:
                baddest_node = next(
                    node for node, _ in sorted(violate_score.items(),
                                               key=operator.itemgetter(1),
                                               reverse=True)
                    if node.metadata is not None and not node.metadata.meta)
            except StopIteration:
                six.reraise(*exc_info)

            bad_meta = baddest_node.metadata
            assert bad_meta is not None
            logger.debug("The node %s had the most conflicts", baddest_node)

            new_constraints = [
                parse_requirement("{}!={}".format(bad_meta.name,
                                                  bad_meta.version))
            ]
            bad_constraint = req_compile.containers.DistInfo(
                "#bad#-{}-{}".format(baddest_node, depth),
                parse_version("0.0.0"),
                new_constraints,
                meta=True,
            )
            dists.remove_dists(baddest_node, remove_upstream=False)
            baddest_node.complete = False
            dists.remove_dists(node, remove_upstream=False)
            node.complete = False

            bad_constraints = dists.add_dist(bad_constraint, None, None)
            try:
                logger.debug("Finding new solutions for %s and %s", node,
                             baddest_node)
                for node_to_compile in (node, baddest_node):
                    compile_roots(
                        node_to_compile,
                        None,
                        repo,
                        dists,
                        options,
                        depth=depth,
                        max_downgrade=max_downgrade - 1,
                        _path=_path,
                    )

                print(
                    "Could not use {} {} - pin to this version to see why not".
                    format(bad_meta.name, bad_meta.version),
                    file=sys.stderr,
                )
            finally:
                dists.remove_dists(bad_constraints, remove_upstream=True)
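
When compile_roots hits an unresolvable conflict, it synthesizes a meta-distribution ("#bad#-…") whose only requirement excludes the offending version, then recompiles both affected nodes. The shape of that exclusion, illustrated with a hypothetical project name:

from pkg_resources import Requirement

# if "demo 2.0" caused the conflict, the synthetic constraint carries:
exclusion = Requirement.parse("demo!=2.0")
assert "2.0" not in exclusion  # forces the next pass onto another version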
Code example #7
File: compile.py Project: sputt/req-compile
def perform_compile(
        input_reqs,  # type: Iterable[RequirementContainer]
        repo,  # type: BaseRepository
        constraint_reqs=None,  # type: Iterable[RequirementContainer]
        extras=None,  # type: Iterable[str]
        allow_circular_dependencies=True,  # type: bool
):
    # type: (...) -> Tuple[DistributionCollection, Set[DependencyNode]]
    """
    Perform a compilation using the given inputs and constraints

    Args:
        input_reqs:
            List or mapping of input requirements. If a mapping is provided,
            requirements will be kept separate during compilation for better
            insight into the resolved requirements
        repo: Repository to use as a source of Python packages.
        extras: Extras to apply automatically to source projects
        constraint_reqs: Constraints to use when compiling
        allow_circular_dependencies: Whether or not to allow circular dependencies
    Returns:
        the solution and root nodes used to generate it
    """
    results = req_compile.dists.DistributionCollection()

    constraint_nodes = set()
    nodes = set()
    all_pinned = True
    pinned_requirements = {}

    if constraint_reqs is not None:
        for constraint_source in constraint_reqs:
            all_pinned &= all(
                [is_pinned_requirement(req) for req in constraint_source])
            if all_pinned:
                for req in constraint_source:
                    pinned_requirements[normalize_project_name(
                        req.project_name)] = req

        if not all_pinned:
            for constraint_source in constraint_reqs:
                constraint_node = results.add_dist(constraint_source, None,
                                                   None)
                constraint_nodes |= constraint_node
                nodes |= constraint_nodes

    roots = set()
    for req_source in input_reqs:
        roots |= results.add_dist(req_source, None, None)

    nodes |= roots

    options = CompileOptions()
    options.allow_circular_dependencies = allow_circular_dependencies
    options.extras = extras

    if all_pinned and constraint_reqs:
        LOG.info(
            "All constraints were pins - no need to solve the constraints")
        options.pinned_requirements = pinned_requirements

    try:
        for node in sorted(nodes):
            compile_roots(node, None, repo, results, options)
    except (NoCandidateException, req_compile.errors.MetadataError) as ex:
        _add_constraints(all_pinned, constraint_reqs, results)
        ex.results = results
        raise

    # Add the constraints in so it will show up as a contributor in the results.
    # The same is done in the exception block above
    _add_constraints(all_pinned, constraint_reqs, results)

    return results, roots
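
perform_compile skips solving the constraints entirely when every constraint is an exact pin. A hedged sketch of what is_pinned_requirement checks (an assumption; the real helper may also treat URL requirements specially):

def is_pinned_requirement(req):
    # assumption: pinned means exactly one specifier, naming an exact version
    return len(req.specs) == 1 and req.specs[0][0] in ("==", "===")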
Code example #8
    def do_get_candidate(self,
                         req,
                         candidates,
                         force_allow_prerelease=False,
                         max_downgrade=None):
        # type: (pkg_resources.Requirement, Iterable[Candidate], bool, Optional[int]) -> Tuple[RequirementContainer, bool]
        """
        Args:
            req: Requirement to fetch candidate for
            candidates: Available candidates (any versions, unsorted)
            force_allow_prerelease: Override the allow prerelease setting
            max_downgrade: Number of different versions to try. Does not limit number of candidates
                per version nor make any judgements about the semver

        Raises:
            NoCandidateException if no candidate could be found, or IO errors related to failing
                to fetch the desired candidate

        Returns:
            The distribution and whether or not it was cached
        """
        allow_prereleases = force_allow_prerelease or self.allow_prerelease
        if candidates:
            filtered_candidates = filter_candidates(
                req, candidates, allow_prereleases=allow_prereleases)
            tried_versions = set()

            for candidate in sort_candidates(filtered_candidates):
                if candidate.version is None:
                    self.logger.warning("Found candidate with no version: %s",
                                        candidate)
                    continue

                if candidate.type == DistributionType.SDIST:
                    self.logger.warning(
                        "Considering source distribution for %s",
                        candidate.name)

                try:
                    dist, cached = self.resolve_candidate(candidate)
                    if dist is not None:
                        if normalize_project_name(
                                candidate.name) == normalize_project_name(
                                    req.project_name):
                            return dist, cached
                except req_compile.errors.MetadataError as ex:
                    self.logger.warning("Could not use candidate %s - %s",
                                        candidate, ex)

                tried_versions.add(candidate.version)
                if max_downgrade is not None and len(
                        tried_versions) >= max_downgrade:
                    break

        if (_is_all_prereleases(candidates)
                or req_compile.utils.has_prerelease(req)
            ) and not allow_prereleases:
            self.logger.debug(
                "No non-prerelease candidates available. Now allowing prereleases"
            )
            return self.do_get_candidate(
                req,
                candidates,
                force_allow_prerelease=True,
                max_downgrade=max_downgrade,
            )

        raise NoCandidateException(req)
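
do_get_candidate only relaxes the prerelease filter after exhausting stable candidates. A sketch of the _is_all_prereleases helper it relies on, assuming candidate.version is a packaging Version (an assumption, not the excerpted implementation):

def _is_all_prereleases(candidates):
    # true only when there is at least one candidate and every candidate's
    # version is a prerelease (e.g. 2.0rc1)
    seen_any = False
    for candidate in candidates:
        if candidate.version is None or not candidate.version.is_prerelease:
            return False
        seen_any = True
    return seen_any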
Code example #9
def _fetch_from_setup_py(
    source_file, name, version, extractor
):  # pylint: disable=too-many-branches
    """Attempt a set of executions to obtain metadata from the setup.py without having to build
    a wheel.  First attempt without mocking __import__ at all. This means that projects
    which import a package inside of themselves will not succeed, but all other simple
    source distributions will. If this fails, allow mocking of __import__ to extract from
    tar files and zip files.  Imports will trigger files to be extracted and executed.  If
    this fails, due to true build prerequisites not being satisfied or the mocks being
    insufficient, build the wheel and extract the metadata from it.

    Args:
        source_file (str): The source archive or directory
        name (str): The project name. Used if it cannot be determined from the archive
        version (str): The expected version. Used if it cannot be determined from
            the archive, or if the parsed version disagrees
        extractor (Extractor): The extractor to use to obtain files from the archive

    Returns:
        (DistInfo) The resulting distribution metadata
    """
    results = None

    setattr(THREADLOCAL, "curdir", extractor.fake_root)

    def _fake_chdir(new_dir):
        if os.path.isabs(new_dir):
            dir_test = os.path.relpath(new_dir, extractor.fake_root)
            if dir_test != "." and dir_test.startswith("."):
                raise ValueError(
                    "Cannot operate outside of setup dir ({})".format(dir_test)
                )
        elif new_dir == "..":
            new_dir = "/".join(re.split(r"[/\\]", os.getcwd())[:-1])
        setattr(THREADLOCAL, "curdir", os.path.abspath(new_dir))

    def _fake_getcwd():
        return getattr(THREADLOCAL, "curdir")

    def _fake_abspath(path):
        """Return the absolute version of a path."""
        if not os.path.isabs(path):
            if six.PY2 and isinstance(
                path, unicode  # pylint: disable=undefined-variable
            ):
                cwd = os.getcwdu()  # pylint: disable=no-member
            else:
                cwd = os.getcwd()
            path = cwd + "/" + path
        return path

    # fmt: off
    patches = patch(
            os, 'chdir', _fake_chdir,
            os, 'getcwd', _fake_getcwd,
            os, 'getcwdu', _fake_getcwd,
            os.path, 'abspath', _fake_abspath,
    )
    # fmt: on
    with patches:
        setup_file = find_in_archive(extractor, "setup.py", max_depth=1)

        if name == "setuptools":
            LOG.debug("Not running setup.py for setuptools")
            return None

        if setup_file is None:
            LOG.warning(
                "Could not find a setup.py in %s", os.path.basename(source_file)
            )
            return None

        try:
            LOG.info("Parsing setup.py %s", setup_file)
            results = _parse_setup_py(name, setup_file, extractor)
        except Exception:  # pylint: disable=broad-except
            LOG.warning("Failed to parse %s", name, exc_info=True)

    if results is None:
        results = _build_egg_info(name, extractor, setup_file)

    if results is None or (results.name is None and results.version is None):
        return None

    if results.name is None:
        results.name = name
    if results.version is None or (version and results.version != version):
        LOG.debug(
            "Parsed version of %s did not match filename %s", results.version, version
        )
        results.version = version or utils.parse_version("0.0.0")

    if not isinstance(extractor, NonExtractor) and utils.normalize_project_name(
        results.name
    ) != utils.normalize_project_name(name):
        LOG.warning("Name coming from setup.py does not match: %s", results.name)
        results.name = name
    return results
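
The patch(...) call above takes flat (object, attribute, replacement) triples and undoes everything on exit. A self-contained sketch of such a helper (an assumption; req-compile's version may differ, for example in how it treats attributes that do not exist, such as os.getcwdu on Python 3):

import contextlib

@contextlib.contextmanager
def patch(*triples):
    saved = []
    try:
        for i in range(0, len(triples), 3):
            obj, attr, replacement = triples[i:i + 3]
            # remember the original so it can be restored afterwards
            saved.append((obj, attr, getattr(obj, attr, None)))
            setattr(obj, attr, replacement)
        yield
    finally:
        for obj, attr, original in reversed(saved):
            setattr(obj, attr, original)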
Code example #10
    def __getitem__(self, project_name):
        req_name = project_name.split("[")[0]
        return self.nodes[normalize_project_name(req_name)]
Code example #11
    def __contains__(self, project_name):
        req_name = project_name.split("[")[0]
        return normalize_project_name(req_name) in self.nodes
Code example #12
    @staticmethod
    def _build_key(name):
        return normalize_project_name(name)
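
Examples #10 through #12 give the collection dict-like lookups that strip extras and normalize names before touching self.nodes. A runnable stand-in (the Collection class here is hypothetical, condensing the three methods above):

import re

def normalize_project_name(name):
    # same PEP 503-style normalization assumed in example #1
    return re.sub(r"[-_.]+", "-", name).lower()

class Collection(object):
    def __init__(self):
        self.nodes = {}

    def __getitem__(self, project_name):
        return self.nodes[normalize_project_name(project_name.split("[")[0])]

    def __contains__(self, project_name):
        return normalize_project_name(project_name.split("[")[0]) in self.nodes

dists = Collection()
dists.nodes["flask"] = "<node>"
assert dists["Flask[dotenv]"] == "<node>"  # extras stripped, name normalized
assert "FLASK" in dists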