Example 1
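A test of header stamping: `LockfileMetadata.new` builds metadata from interpreter constraints and a requirement set, and `add_header_to_lockfile` prepends the regenerate command plus the BEGIN/END metadata block to the raw lockfile bytes. The result is compared to the expected output line by line, ignoring whitespace.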
def test_add_header_to_lockfile() -> None:
    input_lockfile = b"""dave==3.1.4 \\
    --hash=sha256:cab0c0c0c0c0dadacafec0c0c0c0cafedadabeefc0c0c0c0feedbeeffeedbeef \\
    """

    expected = b"""
# This lockfile was autogenerated by Pants. To regenerate, run:
#
#    ./pants lock
#
# --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE ---
# {
#   "version": 2,
#   "valid_for_interpreter_constraints": [
#     "CPython>=3.7"
#   ],
#   "generated_with_requirements": [
#     "ansicolors==0.1.0"
#   ]
# }
# --- END PANTS LOCKFILE METADATA ---
dave==3.1.4 \\
    --hash=sha256:cab0c0c0c0c0dadacafec0c0c0c0cafedadabeefc0c0c0c0feedbeeffeedbeef \\
    """

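    # Normalize for comparison: strip each line and drop blank lines, so
    # indentation and empty lines do not affect the assertion below.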
    def line_by_line(b: bytes) -> list[bytes]:
        return [i for i in (j.strip() for j in b.splitlines()) if i]

    metadata = LockfileMetadata.new(InterpreterConstraints([">=3.7"]),
                                    reqset("ansicolors==0.1.0"))
    result = metadata.add_header_to_lockfile(input_lockfile,
                                             regenerate_command="./pants lock")
    assert line_by_line(result) == line_by_line(expected)
Example 2
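A round-trip check: metadata serialized into a lockfile header by `add_header_to_lockfile` is recovered unchanged by `LockfileMetadata.from_lockfile`.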
def test_metadata_header_round_trip() -> None:
    input_metadata = LockfileMetadata.new(
        InterpreterConstraints(
            ["CPython==2.7.*", "PyPy", "CPython>=3.6,<4,!=3.7.*"]),
        reqset("ansicolors==0.1.0"),
    )
    serialized_lockfile = input_metadata.add_header_to_lockfile(
        b"req1==1.0", regenerate_command="./pants lock")
    output_metadata = LockfileMetadata.from_lockfile(serialized_lockfile)
    assert input_metadata == output_metadata
Example 3
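A Pants rule that generates a lockfile with Poetry: it writes a `pyproject.toml` for the requested requirements, runs `poetry lock` and `poetry export` inside a `VenvPex`, stamps the exported file with a `LockfileMetadata` header, and returns the result as a `PythonLockfile`.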
async def generate_lockfile(
    req: PythonLockfileRequest,
    poetry_subsystem: PoetrySubsystem,
    generate_lockfiles_subsystem: GenerateLockfilesSubsystem,
) -> PythonLockfile:
    pyproject_toml = create_pyproject_toml(req.requirements, req.interpreter_constraints).encode()
    pyproject_toml_digest, launcher_digest = await MultiGet(
        Get(Digest, CreateDigest([FileContent("pyproject.toml", pyproject_toml)])),
        Get(Digest, CreateDigest([POETRY_LAUNCHER])),
    )

    poetry_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="poetry.pex",
            internal_only=True,
            requirements=poetry_subsystem.pex_requirements(),
            interpreter_constraints=poetry_subsystem.interpreter_constraints,
            main=EntryPoint(PurePath(POETRY_LAUNCHER.path).stem),
            sources=launcher_digest,
        ),
    )

    # WONTFIX(#12314): Wire up Poetry to named_caches.
    # WONTFIX(#12314): Wire up all the pip options like indexes.
    poetry_lock_result = await Get(
        ProcessResult,
        VenvPexProcess(
            poetry_pex,
            argv=("lock",),
            input_digest=pyproject_toml_digest,
            output_files=("poetry.lock", "pyproject.toml"),
            description=req._description or f"Generate lockfile for {req.resolve_name}",
            # Instead of caching lockfile generation with LMDB, we instead use the invalidation
            # scheme from `lockfile_metadata.py` to check for stale/invalid lockfiles. This is
            # necessary so that our invalidation is resilient to deleting LMDB or running on a
            # new machine.
            #
            # We disable caching with LMDB so that when you generate a lockfile, you always get
            # the most up-to-date snapshot of the world. This is generally desirable and also
            # necessary to avoid an awkward edge case where different developers generate different
            # lockfiles even when generating at the same time. See
            # https://github.com/pantsbuild/pants/issues/12591.
            cache_scope=ProcessCacheScope.PER_SESSION,
        ),
    )
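    # Convert the Poetry-native poetry.lock into a pip-compatible requirements
    # file at req.lockfile_dest via `poetry export`.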
    poetry_export_result = await Get(
        ProcessResult,
        VenvPexProcess(
            poetry_pex,
            argv=("export", "-o", req.lockfile_dest),
            input_digest=poetry_lock_result.output_digest,
            output_files=(req.lockfile_dest,),
            description=(
                f"Exporting Poetry lockfile to requirements.txt format for {req.resolve_name}"
            ),
            level=LogLevel.DEBUG,
        ),
    )

    initial_lockfile_digest_contents = await Get(
        DigestContents, Digest, poetry_export_result.output_digest
    )
    # TODO(#12314) Improve error message on `Requirement.parse`
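    # Stamp the exported lockfile with the invalidation metadata (interpreter
    # constraints and input requirements) used by the staleness checks described above.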
    metadata = LockfileMetadata.new(
        req.interpreter_constraints,
        {PipRequirement.parse(i) for i in req.requirements},
    )
    lockfile_with_header = metadata.add_header_to_lockfile(
        initial_lockfile_digest_contents[0].content,
        regenerate_command=(
            generate_lockfiles_subsystem.custom_command
            or req._regenerate_command
            or f"./pants generate-lockfiles --resolve={req.resolve_name}"
        ),
    )
    final_lockfile_digest = await Get(
        Digest, CreateDigest([FileContent(req.lockfile_dest, lockfile_with_header)])
    )
    return PythonLockfile(final_lockfile_digest, req.resolve_name, req.lockfile_dest)