Example #1
 def compare_with_working_set(
         self) -> Tuple[List[str], List[str], List[str]]:
     """Compares the candidates and return (to_add, to_update, to_remove)"""
     working_set = self.working_set
     to_update, to_remove = [], []
     candidates = self.candidates.copy()
     environment = self.environment.marker_environment
     for key, dist in working_set.items():
         if key in candidates:
             can = candidates.pop(key)
             if can.marker and not can.marker.evaluate(environment):
                 to_remove.append(key)
             elif not is_dist_editable(dist) and (
                     dist.version != can.version or can.req.editable):
                 to_update.append(key)
             elif is_dist_editable(dist) and not can.req.editable:
                 to_update.append(key)
         elif key not in self.all_candidates and key not in self.SEQUENTIAL_PACKAGES:
             # Remove package only if it is not required by any section
             # Packages for packaging will never be removed
             to_remove.append(key)
     to_add = list({
         strip_extras(name)[0]
         for name, can in candidates.items()
         if not (can.marker and not can.marker.evaluate(environment))
         and strip_extras(name)[0] not in working_set
     })
     return sorted(to_add), sorted(to_update), sorted(to_remove)
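
Every example on this page leans on strip_extras to split a key like "requests[socks]" into a base name and its extras. The library's own implementation is not shown here; a minimal sketch of what such a helper plausibly looks like:

import re
from typing import Optional, Tuple


def strip_extras(line: str) -> Tuple[str, Optional[Tuple[str, ...]]]:
    # Split "requests[socks,security]" into ("requests", ("socks", "security")).
    # A bare name comes back unchanged, with extras set to None.
    match = re.match(r"^(.+?)\[([^\]]+)\]$", line)
    if not match:
        return line, None
    name, extras = match.groups()
    return name, tuple(e.strip() for e in extras.split(","))
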
Example #2
    def add_package(key: str, dist: Distribution) -> Package:
        name, extras = strip_extras(key)
        extras = extras or ()
        reqs = {}
        if dist:
            requirements = [
                Requirement.from_pkg_requirement(r) for r in dist.requires(extras)
            ]
            for req in requirements:
                reqs[req.identify()] = req
            version = dist.version
        else:
            version = None

        node = Package(key, version, reqs)
        if node not in graph:
            if extras:
                node_with_extras.add(name)
            graph.add(node)

            for k in reqs:
                child = add_package(k, working_set.get(strip_extras(k)[0]))
                graph.connect(node, child)

        return node
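
Example #2's recursion assumes a graph whose nodes hash by package key alone, so "node not in graph" is true exactly for packages not yet visited. Hypothetical stand-ins for the structures it relies on (the project's real Package and graph types will differ):

class Package:
    # Hypothetical node type: identity is the key only, so two Package
    # instances for the same project compare equal regardless of version.
    def __init__(self, key, version, requirements):
        self.key = key
        self.version = version
        self.requirements = requirements

    def __eq__(self, other):
        return isinstance(other, Package) and self.key == other.key

    def __hash__(self):
        return hash(self.key)


class DirectedGraph:
    # Hypothetical minimal graph: a vertex set plus parent -> child edges.
    def __init__(self):
        self._vertices = set()
        self._edges = []

    def __contains__(self, node):
        return node in self._vertices

    def add(self, node):
        self._vertices.add(node)

    def connect(self, parent, child):
        self._edges.append((parent, child))
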
Example #3
 def compare_with_working_set(
         self) -> Tuple[List[str], List[str], List[str]]:
     """Compares the candidates and return (to_add, to_update, to_remove)"""
     working_set = self.working_set
     to_update, to_remove = [], []
     candidates = self.candidates.copy()
     environment = self.environment.marker_environment
     for key, dist in working_set.items():
         if key in candidates:
             can = candidates.pop(key)
             if can.marker and not can.marker.evaluate(environment):
                 to_remove.append(key)
             elif not is_dist_editable(
                     dist) and dist.version != can.version:
                 # XXX: An editable distribution is always considered consistent.
                 to_update.append(key)
         elif key not in self.all_candidates and key not in ("wheel",
                                                             "setuptools"):
             # Remove package only if it is not required by any section
             to_remove.append(key)
     to_add = list({
         strip_extras(name)[0]
         for name, can in candidates.items()
         if not (can.marker and not can.marker.evaluate(environment))
         and strip_extras(name)[0] not in working_set
     })
     return to_add, to_update, to_remove
Example #4
 def compare_with_working_set(
         self) -> Tuple[List[str], List[str], List[str]]:
     """Compares the candidates and return (to_add, to_update, to_remove)"""
     working_set = self.environment.get_working_set()
     to_update, to_remove = [], []
     candidates = self.candidates.copy()
     environment = self.environment.marker_environment
     for key, dist in working_set.items():
         if key not in candidates:
             to_remove.append(key)
         else:
             can = candidates.pop(key)
             if can.marker and not can.marker.evaluate(environment):
                 to_remove.append(key)
             elif not _is_dist_editable(
                     dist) and dist.version != can.version:
                 # XXX: An editable distribution is always considered consistent.
                 to_update.append(key)
     to_add = list({
         strip_extras(name)[0]
         for name, can in candidates.items()
         if not (can.marker and not can.marker.evaluate(environment))
         and strip_extras(name)[0] not in working_set
     })
     return to_add, to_update, to_remove
Example #5
    def compare_with_working_set(
            self) -> Tuple[List[str], List[str], List[str]]:
        """Compares the candidates and return (to_add, to_update, to_remove)"""
        working_set = self.working_set
        candidates = self.candidates.copy()
        to_update, to_remove = [], []

        for key, dist in working_set.items():
            if key == self.self_key:
                continue
            if key in candidates:
                can = candidates.pop(key)
                if (can.req.editable or self.reinstall or is_editable(dist)
                        or (dist.version != can.version)):
                    to_update.append(key)
            elif (key not in self.all_candidate_keys
                  and key not in self.SEQUENTIAL_PACKAGES):
                # Remove package only if it is not required by any group
                # Packages for packaging will never be removed
                to_remove.append(key)
        to_add = list({
            strip_extras(name)[0]
            for name, _ in candidates.items() if name != self.self_key
            and strip_extras(name)[0] not in working_set
        })
        return (
            sorted(to_add),
            sorted(to_update),
            sorted(to_remove) if self.clean else [],
        )
Example #6
    def add_package(key: str, dist: Distribution | None) -> Package:
        name, extras = strip_extras(key)
        extras = extras or ()
        reqs: dict[str, Requirement] = {}
        if dist:
            requirements = (parse_requirement(r)
                            for r in filter_requirements_with_extras(
                                dist.requires or [], extras))
            for req in requirements:
                if not req.marker or req.marker.evaluate(marker_env):
                    reqs[req.identify()] = req
            version: str | None = dist.version
        else:
            version = None

        node = Package(key, version, reqs)
        if node not in graph:
            if extras:
                node_with_extras.add(name)
            graph.add(node)

            for k in reqs:
                child = add_package(k, working_set.get(strip_extras(k)[0]))
                graph.connect(node, child)

        return node
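
Example #6 delegates extra handling to filter_requirements_with_extras, which is not shown. A hedged sketch of the idea, assuming PEP 508 requirement strings as input (the project's real helper presumably also rewrites markers rather than merely filtering):

from packaging.requirements import Requirement as PkgRequirement


def filter_requirements_with_extras(requirement_lines, extras):
    # Hedged sketch: keep a requirement when its marker either does not
    # mention "extra" at all, or holds once "extra" is bound to one of
    # the requested extras. The real helper likely does more, e.g.
    # stripping the satisfied 'extra == ...' clause from the marker.
    kept = []
    for line in requirement_lines:
        req = PkgRequirement(line)
        if req.marker is None or "extra" not in str(req.marker):
            kept.append(line)
        elif any(req.marker.evaluate({"extra": e}) for e in extras):
            kept.append(line)
    return kept
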
Example #7
    def add_dependencies(self,
                         requirements: Dict[str, Requirement],
                         show_message: bool = True) -> None:
        for name, dep in requirements.items():
            if dep.from_section == "default":
                deps = self.tool_settings["dependencies"]
            elif dep.from_section == "dev":
                deps = self.tool_settings["dev-dependencies"]
            else:
                section = f"{dep.from_section}-dependencies"
                if section not in self.tool_settings:
                    self.tool_settings[section] = tomlkit.table()
                deps = self.tool_settings[section]

            matched_name = next(
                filter(lambda k: strip_extras(name)[0] == safe_name(k).lower(),
                       deps.keys()),
                None,
            )
            name_to_save = dep.name if matched_name is None else matched_name
            _, req_dict = dep.as_req_dict()
            if isinstance(req_dict, dict):
                req = tomlkit.inline_table()
                req.update(req_dict)
                req_dict = req
            deps[name_to_save] = req_dict
        self.write_pyproject(show_message)
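
A hedged usage sketch for Example #7. Requirement.from_line and the from_section attribute are assumptions inferred from how the surrounding examples use Requirement; the real entry point may differ:

def pin_new_dependency(project, line, section="default"):
    # Hypothetical caller: parse a PEP 508 line, tag it with the target
    # section, and let add_dependencies persist it to pyproject.toml.
    req = Requirement.from_line(line)  # assumed constructor
    req.from_section = section
    project.add_dependencies({req.identify(): req}, show_message=False)
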
Example #8
def _build_marker_and_pyspec(
    key: str,
    criterion: Criterion,
    pythons: Dict[str, PySpecSet],
    all_metasets: Dict[str, Tuple[Optional[Marker], PySpecSet]],
    keep_unresolved: Set[Optional[str]],
) -> Tuple[Optional[Marker], PySpecSet]:

    metasets = None

    for r, parent in criterion.information:
        if parent and _identify_parent(parent) in keep_unresolved:
            continue
        python = pythons[strip_extras(key)[0]]
        marker, pyspec = r.marker_no_python, r.requires_python
        pyspec = python & pyspec
        # Use 'and' to connect markers inherited from parent.
        if not parent:
            parent_metaset = None, PySpecSet()
        else:
            parent_metaset = all_metasets[_identify_parent(parent)]
        child_marker = (parent_metaset[0] & marker if any(
            (parent_metaset[0], marker)) else None)
        child_pyspec = parent_metaset[1] & pyspec
        if not metasets:
            metasets = child_marker, child_pyspec
        else:
            # Use 'or' to connect metasets inherited from different parents.
            marker = metasets[0] | child_marker if any(
                (child_marker, marker)) else None
            metasets = marker, metasets[1] | child_pyspec
    return metasets or (None, PySpecSet())
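
The & and | operators in Example #8 (and #9 below) assume a Marker type that composes logically, with None standing for "no constraint". A toy illustration of the two combination rules, using plain strings in place of real markers:

def and_markers(parent, child):
    # 'and' rule: None is the identity, so a child inherits the parent's
    # marker unchanged when either side is unconstrained.
    if parent is None:
        return child
    if child is None:
        return parent
    return f"({parent}) and ({child})"


def or_markers(a, b):
    # 'or' rule: if any one route to a package is unconstrained, the
    # union is unconstrained too, hence the "... if any(...) else None"
    # guards in the example above.
    if a is None or b is None:
        return None
    return f"({a}) or ({b})"
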
Example #9
def _build_marker_and_pyspec(dependencies, pythons, key, trace, all_metasets):
    all_parent_metasets = {}
    for route in trace:
        parent = route[-1]
        if parent in all_parent_metasets:
            continue
        try:
            parent_metaset = all_metasets[parent]
        except KeyError:  # Parent not calculated yet. Wait for it.
            if not parent.startswith("__"):
                return
            parent_metaset = None, PySpecSet()
        all_parent_metasets[parent] = parent_metaset

    metasets = None
    for parent, parent_metaset in all_parent_metasets.items():
        r = dependencies[parent][key]
        python = pythons[strip_extras(key)[0]]
        marker, pyspec = r.marker_no_python, r.requires_python
        pyspec = python & pyspec
        # Use 'and' to connect markers inherited from parent.
        child_marker = (parent_metaset[0] & marker if any(
            (parent_metaset[0], marker)) else None)
        child_pyspec = parent_metaset[1] & pyspec
        if not metasets:
            metasets = child_marker, child_pyspec
        else:
            # Use 'or' to connect metasets inherited from different parents.
            marker = metasets[0] | child_marker if any(
                (child_marker, marker)) else None
            metasets = marker, metasets[1] | child_pyspec
    return metasets or (None, PySpecSet())
Example #10
def resolve(
    resolver: Resolver,
    requirements: List[Requirement],
    requires_python: PySpecSet,
    max_rounds: int = 10000,
) -> Tuple[Dict[str, Candidate], Dict[str, List[Requirement]], Dict[str, str]]:
    """Core function to perform the actual resolve process.
    Return a tuple containing 3 items:

        1. A map of pinned candidates
        2. A map of resolved dependencies from each section of pyproject.toml
        3. A map of package descriptions fetched from PyPI source.
    """
    provider = resolver.provider
    result = resolver.resolve(requirements, max_rounds)

    mapping = result.mapping
    for key, candidate in list(result.mapping.items()):
        if key is None:
            continue
        # Root requires_python doesn't participate in the metaset resolving,
        # now check it!
        candidate_requires = provider.requires_python_collection[strip_extras(key)[0]]
        if (requires_python & candidate_requires).is_impossible:
            # Remove candidates that don't match requires_python constraint
            del mapping[key]
        else:
            candidate.requires_python = str(candidate_requires)
            candidate.hashes = provider.get_hashes(candidate)

    return mapping, provider.fetched_dependencies, provider.summary_collection
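
Example #10's resolver argument is a resolvelib Resolver. A hedged sketch of the wiring, leaving the provider and reporter construction to the caller:

from resolvelib import Resolver


def run_resolution(provider, reporter, requirements, python_spec):
    # resolvelib pairs a provider (how to find and rank candidates) with
    # a reporter (progress callbacks); resolve() above then post-processes
    # the resulting mapping.
    resolver = Resolver(provider, reporter)
    return resolve(resolver, requirements, python_spec)
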
Example #11
 def update_candidate(self, key: str) -> Tuple[Distribution, Candidate]:
     """Update candidate"""
     can = self.candidates[key]
     dist = self.working_set[strip_extras(key)[0]]
     installer = self.get_installer()
     with self.ui.open_spinner(
         f"Updating {termui.green(key, bold=True)} {termui.yellow(dist.version)} "
         f"-> {termui.yellow(can.version)}..."
     ) as spinner:
         try:
             installer.uninstall(dist)
             installer.install(can)
         except Exception:
             spinner.fail(
                 f"Update {termui.green(key, bold=True)} "
                 f"{termui.yellow(dist.version)} -> "
                 f"{termui.yellow(can.version)} failed"
             )
             raise
         else:
             spinner.succeed(
                 f"Update {termui.green(key, bold=True)} "
                 f"{termui.yellow(dist.version)} -> "
                 f"{termui.yellow(can.version)} successful"
             )
     return dist, can
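
update_candidate in Example #11 is one of three operations a synchronizer would apply after compare_with_working_set. A hedged sketch of the dispatch loop; install_candidate and remove_distribution are assumed counterparts:

def apply_changes(synchronizer):
    # Hypothetical driver: the three lists map one-to-one onto install,
    # update, and uninstall operations.
    to_add, to_update, to_remove = synchronizer.compare_with_working_set()
    for key in to_add:
        synchronizer.install_candidate(key)
    for key in to_update:
        synchronizer.update_candidate(key)
    for key in to_remove:
        synchronizer.remove_distribution(key)
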
Example #12
def format_lockfile(
    mapping: Dict[str, Candidate],
    fetched_dependencies: Dict[str, List[Requirement]],
    summary_collection: Dict[str, str],
) -> Dict:
    """Format lock file from a dict of resolved candidates, a mapping of dependencies
    and a collection of package summaries.
    """
    packages = tomlkit.aot()
    file_hashes = tomlkit.table()
    for k, v in sorted(mapping.items()):
        base = tomlkit.table()
        base.update(v.as_lockfile_entry())
        base.add("summary", summary_collection[strip_extras(k)[0]])
        deps = make_array([r.as_line() for r in fetched_dependencies[k]], True)
        if len(deps) > 0:
            base.add("dependencies", deps)
        packages.append(base)
        if v.hashes:
            key = f"{k} {v.version}"
            array = tomlkit.array()
            array.multiline(True)
            for filename, hash_value in v.hashes.items():
                inline = make_inline_table({"file": filename, "hash": hash_value})
                array.append(inline)
            if array:
                file_hashes.add(key, array)
    doc = tomlkit.document()
    doc.add("package", packages)
    metadata = tomlkit.table()
    metadata.add("files", file_hashes)
    doc.add("metadata", metadata)
    return doc
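
Example #12 (and #16 below) calls two small helpers that are not shown. Plausible implementations inferred from the call sites; the library's versions may differ in detail:

import tomlkit


def make_inline_table(data):
    # Inferred helper: wrap a plain dict as a TOML inline table.
    table = tomlkit.inline_table()
    table.update(data)
    return table


def make_array(data, multiline=False):
    # Inferred helper: build a TOML array, optionally one item per line.
    if not data:
        return []
    array = tomlkit.array().multiline(multiline)
    for item in data:
        array.append(item)
    return array
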
Example #13
def format_lockfile(mapping, fetched_dependencies, summary_collection):
    """Format lock file from a dict of resolved candidates, a mapping of dependencies
    and a collection of package summaries.
    """
    packages = tomlkit.aot()
    metadata = tomlkit.table()
    for k, v in sorted(mapping.items()):
        base = tomlkit.table()
        base.update(v.as_lockfile_entry())
        base.add("summary", summary_collection[strip_extras(k)[0]])
        deps = tomlkit.table()
        for r in fetched_dependencies[k].values():
            name, req = r.as_req_dict()
            if getattr(req, "items", None) is not None:
                inline = tomlkit.inline_table()
                inline.update(req)
                deps.add(name, inline)
            else:
                deps.add(name, req)
        if len(deps) > 0:
            base.add("dependencies", deps)
        packages.append(base)
        if v.hashes:
            key = f"{k} {v.version}"
            array = tomlkit.array()
            array.multiline(True)
            for filename, hash_value in v.hashes.items():
                inline = tomlkit.inline_table()
                inline.update({"file": filename, "hash": hash_value})
                array.append(inline)
            if array:
                metadata.add(key, array)
    doc = tomlkit.document()
    doc.update({"package": packages, "metadata": metadata})
    return doc
Example #14
def resolve(
    resolver: Resolver,
    requirements: List[Requirement],
    requires_python: PySpecSet,
    max_rounds: int = 1000,
) -> Tuple[Dict[str, Candidate], Dict[str, List[Requirement]], Dict[str, str]]:
    """Core function to perform the actual resolve process.
    Return a tuple containing 3 items:

        1. A map of pinned candidates
        2. A map of resolved dependencies from each section of pyproject.toml
        3. A map of package descriptions fetched from PyPI source.
    """
    provider, reporter = resolver.provider, resolver.reporter
    result = resolver.resolve(requirements, max_rounds)

    reporter.extract_metadata()
    all_metasets = extract_metadata(result)

    mapping = result.mapping

    for key, metaset in all_metasets.items():
        if key is None:
            continue
        # Root requires_python doesn't participate in the metaset resolving,
        # now check it!
        python = (
            requires_python
            & metaset.requires_python
            & provider.requires_python_collection[strip_extras(key)[0]]
        )
        if python.is_impossible:
            # Candidate doesn't match requires_python constraint
            del mapping[key]
        else:
            candidate = mapping[key]
            candidate.marker = metaset.as_marker()
            candidate.hashes = provider.get_hashes(candidate)

    populate_sections(result)
    return mapping, provider.fetched_dependencies, provider.summary_collection
Example #15
def resolve(
    resolver: Resolver,
    requirements: list[Requirement],
    requires_python: PySpecSet,
    max_rounds: int = 10000,
) -> tuple[dict[str, Candidate], dict[str, list[Requirement]], dict[str, str]]:
    """Core function to perform the actual resolve process.
    Return a tuple containing 3 items:

        1. A map of pinned candidates
        2. A map of resolved dependencies for each dependency group
        3. A map of package descriptions fetched from PyPI source
    """
    provider = cast(BaseProvider, resolver.provider)
    result = resolver.resolve(requirements, max_rounds)

    mapping = cast(Dict[str, Candidate], result.mapping)
    for key, candidate in list(result.mapping.items()):
        if key is None:
            continue
        # For source distribution whose name can only be determined after it is built,
        # the key in the resolution map should be updated.
        if key.startswith(":empty:"):
            new_key = provider.identify(candidate)
            mapping[new_key] = mapping.pop(key)
            key = new_key
        # Root requires_python doesn't participate in the metaset resolving,
        # now check it!
        candidate_requires = provider.requires_python_collection[strip_extras(
            key)[0]]
        if (requires_python & candidate_requires).is_impossible:
            # Remove candidates that don't match requires_python constraint
            del mapping[key]
        else:
            candidate.requires_python = str(candidate_requires)
            candidate.hashes = provider.get_hashes(candidate)

    return mapping, provider.fetched_dependencies, provider.summary_collection
Example #16
def format_lockfile(
    mapping: dict[str, Candidate],
    fetched_dependencies: dict[str, list[Requirement]],
    summary_collection: dict[str, str],
) -> dict:
    """Format lock file from a dict of resolved candidates, a mapping of dependencies
    and a collection of package summaries.
    """
    packages = atoml.aot()
    file_hashes = atoml.table()
    for k, v in sorted(mapping.items()):
        base = atoml.table()
        base.update(v.as_lockfile_entry())  # type: ignore
        base.add("summary", summary_collection[strip_extras(k)[0]])
        deps = make_array(sorted(r.as_line() for r in fetched_dependencies[k]),
                          True)
        if len(deps) > 0:
            base.add("dependencies", deps)
        packages.append(base)  # type: ignore
        if v.hashes:
            key = f"{strip_extras(k)[0]} {v.version}"
            if key in file_hashes:
                continue
            array = atoml.array().multiline(True)
            for filename, hash_value in v.hashes.items():
                inline = make_inline_table({
                    "file": filename,
                    "hash": hash_value
                })
                array.append(inline)  # type: ignore
            if array:
                file_hashes.add(key, array)
    doc = atoml.document()
    doc.add("package", packages)  # type: ignore
    metadata = atoml.table()
    metadata.add("files", file_hashes)
    doc.add("metadata", metadata)  # type: ignore
    return cast(dict, doc)
Example #17
def do_update(
        project: Project,
        dev: bool = False,
        sections: Sequence[str] = (),
        default: bool = True,
        strategy: str = "reuse",
        save: str = "compatible",
        unconstrained: bool = False,
        top: bool = False,
        dry_run: bool = False,
        packages: Sequence[str] = (),
) -> None:
    """Update specified packages or all packages"""
    check_project_file(project)
    if len(packages) > 0 and (top or len(sections) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -s or "
            "--no-default and --top.")
    all_dependencies = project.all_dependencies
    updated_deps = {}
    if not packages:
        sections = translate_sections(project, default, dev, sections or ())
        for section in sections:
            updated_deps.update(all_dependencies[section])
    else:
        section = sections[0] if sections else ("dev" if dev else "default")
        dependencies = all_dependencies[section]
        for name in packages:
            matched_name = next(
                filter(
                    lambda k: safe_name(strip_extras(k)[0]).lower() ==
                    safe_name(name).lower(),
                    dependencies.keys(),
                ),
                None,
            )
            if not matched_name:
                raise ProjectError(
                    "{} does not exist in {} {}dependencies.".format(
                        termui.green(name, bold=True), section,
                        "dev-" if dev else ""))
            updated_deps[matched_name] = dependencies[matched_name]
        project.core.ui.echo("Updating packages: {}.".format(", ".join(
            termui.green(v, bold=True) for v in updated_deps)))
    if unconstrained:
        for _, dep in updated_deps.items():
            dep.specifier = get_specifier("")
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(
        project,
        strategy if top or packages else "all",
        updated_deps.keys(),
        reqs,
        dry_run=dry_run,
    )
    do_sync(
        project,
        sections=sections,
        dev=dev,
        default=default,
        clean=False,
        dry_run=dry_run,
        tracked_names=updated_deps.keys() if top else None,
    )
    if unconstrained and not dry_run:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        project.add_dependencies(updated_deps, section, dev)
        lockfile = project.lockfile
        project.write_lockfile(lockfile, False)
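
A hedged invocation sketch for Example #17, updating a single named package in the dev section, as an update command handler might issue it:

# Hypothetical call; the project's actual CLI layer builds these
# arguments from command-line options.
do_update(
    project,
    dev=True,
    sections=("dev",),
    strategy="eager",
    packages=("requests",),
)
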
Example #18
def do_update(
        project: Project,
        dev: bool = False,
        sections: Sequence[str] = (),
        default: bool = True,
        strategy: str = "reuse",
        save: str = "compatible",
        unconstrained: bool = False,
        packages: Sequence[str] = (),
) -> None:
    """Update specified packages or all packages

    :param project: The project instance
    :param dev: whether to update dev dependencies
    :param sections: update specified sections
    :param default: update default
    :param strategy: update strategy (reuse/eager)
    :param save: save strategy (compatible/exact/wildcard)
    :param unconstrained: ignore version constraint
    :param packages: specified packages to update
    :return: None
    """
    check_project_file(project)
    if len(packages) > 0 and (len(sections) > 1 or not default):
        raise click.BadParameter(
            "packages argument can't be used together with multple -s or --no-default."
        )
    if not packages:
        if unconstrained:
            raise click.BadArgumentUsage(
                "--unconstrained must be used with package names given.")
        # pdm update with no packages given, same as 'lock' + 'sync'
        do_lock(project)
        do_sync(project, sections, dev, default, clean=False)
        return
    section = sections[0] if sections else ("dev" if dev else "default")
    all_dependencies = project.all_dependencies
    dependencies = all_dependencies[section]
    updated_deps = {}
    tracked_names = set()
    for name in packages:
        matched_name = next(
            filter(
                lambda k: safe_name(strip_extras(k)[0]).lower() == safe_name(
                    name).lower(),
                dependencies.keys(),
            ),
            None,
        )
        if not matched_name:
            raise ProjectError("{} does not exist in {} dependencies.".format(
                context.io.green(name, bold=True), section))
        if unconstrained:
            dependencies[matched_name].specifier = get_specifier("")
        tracked_names.add(matched_name)
        updated_deps[matched_name] = dependencies[matched_name]
    context.io.echo("Updating packages: {}.".format(", ".join(
        context.io.green(v, bold=True) for v in tracked_names)))
    resolved = do_lock(project, strategy, tracked_names, all_dependencies)
    do_sync(project, sections=(section, ), default=False, clean=False)
    if unconstrained:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        project.add_dependencies(updated_deps)
        lockfile = project.lockfile
        lockfile["root"]["content_hash"] = "md5:" + project.get_content_hash(
            "md5")
        project.write_lockfile(lockfile, False)
Example #19
def do_update(
    project: Project,
    *,
    dev: bool | None = None,
    groups: Sequence[str] = (),
    default: bool = True,
    strategy: str = "reuse",
    save: str = "compatible",
    unconstrained: bool = False,
    top: bool = False,
    dry_run: bool = False,
    packages: Collection[str] = (),
    sync: bool = True,
    no_editable: bool = False,
    no_self: bool = False,
    prerelease: bool = False,
) -> None:
    """Update specified packages or all packages"""
    check_project_file(project)
    if len(packages) > 0 and (top or len(groups) > 1 or not default):
        raise PdmUsageError(
            "packages argument can't be used together with multiple -G or "
            "--no-default and --top."
        )
    all_dependencies = project.all_dependencies
    updated_deps: dict[str, dict[str, Requirement]] = defaultdict(dict)
    install_dev = True if dev is None else dev
    if not packages:
        if prerelease:
            raise PdmUsageError("--prerelease must be used with packages given")
        groups = translate_groups(project, default, install_dev, groups or ())
        for group in groups:
            updated_deps[group] = all_dependencies[group]
    else:
        group = groups[0] if groups else ("dev" if dev else "default")
        dependencies = all_dependencies[group]
        for name in packages:
            matched_name = next(
                filter(
                    lambda k: normalize_name(strip_extras(k)[0])
                    == normalize_name(name),
                    dependencies.keys(),
                ),
                None,
            )
            if not matched_name:
                raise ProjectError(
                    "{} does not exist in {} {}dependencies.".format(
                        termui.green(name, bold=True), group, "dev-" if dev else ""
                    )
                )
            dependencies[matched_name].prerelease = prerelease
            updated_deps[group][matched_name] = dependencies[matched_name]
        project.core.ui.echo(
            "Updating packages: {}.".format(
                ", ".join(
                    termui.green(v, bold=True)
                    for v in chain.from_iterable(updated_deps.values())
                )
            )
        )
    if unconstrained:
        for deps in updated_deps.values():
            for dep in deps.values():
                dep.specifier = get_specifier("")
    reqs = [r for deps in all_dependencies.values() for r in deps.values()]
    resolved = do_lock(
        project,
        strategy,
        chain.from_iterable(updated_deps.values()),
        reqs,
        dry_run=dry_run,
    )
    if sync or dry_run:
        do_sync(
            project,
            groups=groups,
            dev=install_dev,
            default=default,
            clean=False,
            dry_run=dry_run,
            requirements=[r for deps in updated_deps.values() for r in deps.values()],
            tracked_names=list(chain.from_iterable(updated_deps.values()))
            if top
            else None,
            no_editable=no_editable,
            no_self=no_self,
        )
    if unconstrained and not dry_run:
        # Need to update version constraints
        save_version_specifiers(updated_deps, resolved, save)
        for group, deps in updated_deps.items():
            project.add_dependencies(deps, group, dev or False)
        lockfile = project.lockfile
        project.write_lockfile(lockfile, False)
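
Example #19 matches user-supplied names through normalize_name, which is presumably the standard PEP 503 normalization:

import re


def normalize_name(name: str) -> str:
    # PEP 503: collapse runs of "-", "_" and "." to a single "-" and
    # lowercase, so "Foo_Bar" and "foo-bar" compare equal.
    return re.sub(r"[-_.]+", "-", name).lower()
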