Example #1
    def build_constraints(self):
        # type: () -> pkg_resources.Requirement
        """Merge the constraints that every reverse dependency places on this node."""
        result = None

        # Collect the requirements each reverse dependency (including its
        # active extras) declares for this project, and merge them.
        for rdep_node in self.reverse_deps:
            assert (
                rdep_node.metadata is not None
            ), "Reverse dependency should already have a solution"
            all_reqs = set(rdep_node.metadata.requires())
            for extra in rdep_node.extras:
                all_reqs |= set(rdep_node.metadata.requires(extra=extra))
            for req in all_reqs:
                if normalize_project_name(req.project_name) == self.key:
                    result = merge_requirements(result, req)

        if result is None:
            # No reverse dependency constrains this node; fall back to a bare
            # requirement built from the key or the resolved metadata name.
            if self.metadata is None:
                result = parse_requirement(self.key)
            else:
                result = parse_requirement(self.metadata.name)
            assert result is not None

            if self.extras:
                result.extras = self.extras
                # Reparse to create a correct hash
                result = parse_requirement(str(result))
                assert result is not None
        return result
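
A minimal sketch of what the reverse-dependency merge is assumed to produce: two constraints on the same project collapse into a single requirement whose specifier has to satisfy both. The project name and versions below are made up, and plain pkg_resources is used only to illustrate the resulting constraint.

from pkg_resources import Requirement

# Illustrative only: a merged constraint equivalent to ">=2.0" from one
# reverse dependency and "<3" from another.
merged = Requirement.parse("examplepkg>=2.0,<3")
assert "2.5.0" in merged      # accepted by both constraints
assert "3.1.0" not in merged  # rejected by the upper bound
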
Example #2
def _create_stdin_input_req(line):
    # Try to interpret the line as a local project path first; if that works,
    # record it as an extra source and pin the discovered name and version.
    try:
        result = _create_req_from_path(line)
        extra_sources.append(line)
        return utils.parse_requirement(
            "{}=={}".format(*result.to_definition(None))
        )
    except ValueError:
        # Otherwise treat the line as a plain requirement string.
        return utils.parse_requirement(line)
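
A hypothetical driver for the helper above, assuming one requirement or local path per stdin line and that the surrounding module supplies _create_req_from_path and extra_sources:

import sys

# Illustrative only: run every non-empty stdin line through the
# path-or-requirement fallback shown above.
stdin_reqs = [
    _create_stdin_input_req(line.strip())
    for line in sys.stdin
    if line.strip()
]
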
Example #3
def compile_roots(
    node,  # type: DependencyNode
    source,  # type: Optional[DependencyNode]
    repo,  # type: BaseRepository
    dists,  # type: DistributionCollection
    options,  # type: CompileOptions
    depth=1,  # type: int
    max_downgrade=MAX_DOWNGRADE,  # type: int
    _path=None,  # type: Optional[Set[DependencyNode]]
):  # pylint: disable=too-many-statements,too-many-locals,too-many-branches
    # type: (...) -> None
    """
    Args:
        node: The node to compile
        source: The source node of this provided node. This is used to build the graph
        repo: The repository to provide candidates.
        dists: The solution that is being built incrementally
        options: Static options for the compile (including extras)
        depth: Depth the compilation has descended into
        max_downgrade: The maximum number of version downgrades that will be allowed for conflicts
        _path: The path back to root - all nodes along the way
    """
    if _path is None:
        _path = set()

    logger = LOG
    logger.debug("Processing node %s", node)
    if node.key in dists and dists[node.key] is not node:
        logger.debug("No need to process this node, it has been removed")
    elif node.metadata is not None:
        if not node.complete:
            if depth > MAX_COMPILE_DEPTH:
                raise ValueError("Recursion too deep")
            try:
                for req in sorted(node.dependencies,
                                  key=lambda dep: dep.key):
                    if req in _path:
                        if options.allow_circular_dependencies:
                            logger.error(
                                "Skipping node %s because it includes this node",
                                req,
                            )
                        else:
                            raise ValueError(
                                "Circular dependency: {node} -> {req} -> {node}"
                                .format(
                                    node=node,
                                    req=req,
                                ))
                    else:
                        compile_roots(
                            req,
                            node,
                            repo,
                            dists,
                            options,
                            depth=depth + 1,
                            max_downgrade=max_downgrade,
                            _path=_path | {node},
                        )
                node.complete = True

            except NoCandidateException:
                if max_downgrade == 0:
                    raise
                compile_roots(
                    node,
                    source,
                    repo,
                    dists,
                    options,
                    depth=depth,
                    max_downgrade=0,
                    _path=_path,
                )
        else:
            logger.info("Reusing dist %s %s", node.metadata.name,
                        node.metadata.version)
    else:
        # No distribution has been selected for this node yet; build its
        # constraint and ask the repository for a candidate.
        spec_req = node.build_constraints()

        if options.pinned_requirements:
            pin = options.pinned_requirements.get(
                normalize_project_name(spec_req.project_name), spec_req)
            spec_req = merge_requirements(spec_req, pin)

        try:
            metadata, cached = repo.get_candidate(spec_req,
                                                  max_downgrade=max_downgrade)
            logger.debug(
                "Acquired candidate %s %s [%s] (%s)",
                metadata,
                spec_req,
                metadata.origin,
                "cached" if cached else "download",
            )
            reason = None
            if source is not None:
                if node in source.dependencies:
                    reason = source.dependencies[node]
                    if options.extras and isinstance(metadata.origin,
                                                     SourceRepository):
                        reason = merge_requirements(
                            reason,
                            parse_requirement(reason.project_name + "[" +
                                              ",".join(options.extras) + "]"),
                        )

            nodes_to_recurse = dists.add_dist(metadata, source, reason)
            for recurse_node in sorted(nodes_to_recurse):
                compile_roots(
                    recurse_node,
                    source,
                    repo,
                    dists,
                    options,
                    depth=depth + 1,
                    max_downgrade=max_downgrade,
                    _path=_path,
                )
        except NoCandidateException:
            if max_downgrade == 0:
                raise

            exc_info = sys.exc_info()

            nodes = sorted(node.reverse_deps)

            # Score each reverse dependency by how many pairwise constraint
            # conflicts it participates in for this node.
            violate_score = defaultdict(int)  # type: Dict[DependencyNode, int]
            for idx, revnode in enumerate(nodes):
                for next_node in nodes[idx + 1:]:
                    if not is_possible(
                            merge_requirements(revnode.dependencies[node],
                                               next_node.dependencies[node])):
                        logger.error(
                            "Requirement %s was not possible. Violating pair: %s %s",
                            node.build_constraints(),
                            revnode,
                            next_node,
                        )
                        violate_score[revnode] += 1
                        violate_score[next_node] += 1

            try:
                # Sort descending so the node involved in the most conflicts
                # is considered first; skip meta nodes and unsolved nodes.
                baddest_node = next(
                    node for node, _ in sorted(violate_score.items(),
                                               key=operator.itemgetter(1),
                                               reverse=True)
                    if node.metadata is not None and not node.metadata.meta)
            except StopIteration:
                six.reraise(*exc_info)

            bad_meta = baddest_node.metadata
            assert bad_meta is not None
            logger.debug("The node %s had the most conflicts", baddest_node)

            new_constraints = [
                parse_requirement("{}!={}".format(bad_meta.name,
                                                  bad_meta.version))
            ]
            bad_constraint = req_compile.containers.DistInfo(
                "#bad#-{}-{}".format(baddest_node, depth),
                parse_version("0.0.0"),
                new_constraints,
                meta=True,
            )
            dists.remove_dists(baddest_node, remove_upstream=False)
            baddest_node.complete = False
            dists.remove_dists(node, remove_upstream=False)
            node.complete = False

            bad_constraints = dists.add_dist(bad_constraint, None, None)
            try:
                logger.debug("Finding new solutions for %s and %s", node,
                             baddest_node)
                for node_to_compile in (node, baddest_node):
                    compile_roots(
                        node_to_compile,
                        None,
                        repo,
                        dists,
                        options,
                        depth=depth,
                        max_downgrade=max_downgrade - 1,
                        _path=_path,
                    )

                print(
                    "Could not use {} {} - pin to this version to see why not".
                    format(bad_meta.name, bad_meta.version),
                    file=sys.stderr,
                )
            finally:
                dists.remove_dists(bad_constraints, remove_upstream=True)
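
The conflict-scoring step above hinges on is_possible rejecting a merged requirement whose specifiers can never hold at once. A small sketch of that assumed behavior, using a made-up project name:

# Illustrative only (assumed behavior): two different exact pins on the same
# project cannot both be satisfied, so the merged requirement should be
# reported impossible; this is what increments violate_score for the pair.
combined = merge_requirements(
    parse_requirement("examplepkg==1.0"),
    parse_requirement("examplepkg==2.0"),
)
assert not is_possible(combined)
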
Example #4
def parse_req_with_marker(req_str, marker):
    return utils.parse_requirement(
        req_str + " and {}".format(marker)
        if ";" in req_str
        else req_str + "; {}".format(marker)
    )
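
Illustrative calls showing the strings the helper passes to utils.parse_requirement; the project name and marker are made up. A marker is appended directly when the requirement has none, and joined with "and" when one is already present:

parse_req_with_marker("examplepkg>=1.0", 'python_version<"3.8"')
# parses: examplepkg>=1.0; python_version<"3.8"
parse_req_with_marker('examplepkg>=1.0; extra=="test"', 'python_version<"3.8"')
# parses: examplepkg>=1.0; extra=="test" and python_version<"3.8"
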
Example #5
    def build(self, roots):
        # Pin each resolved (name, version) pair as an exact "==" requirement.
        results = self.generate_lines(roots)
        return [
            parse_requirement("==".join([result[0][0], str(result[0][1])]))
            for result in results
        ]
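
Assuming each entry yielded by generate_lines starts with a (name, version) pair, as the indexing above suggests, build pins every result exactly:

# Illustrative only: one entry of the assumed shape and the pin it produces.
result = (("examplepkg", "1.2.3"), None)
parse_requirement("==".join([result[0][0], str(result[0][1])]))
# -> examplepkg==1.2.3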