Example #1
0
def upload(c, directory, index=None, sign=False, dry_run=False):
    """
    Upload (potentially also signing) all artifacts in ``directory``.

    :param str index:
        Custom upload index/repository name.

        By default, uses whatever the invoked ``pip`` is configured to use.
        Modify your ``pypirc`` file to add new named repositories.

    :param bool sign:
        Whether to sign the built archive(s) via GPG.

    :param bool dry_run:
        Skip actual publication step if ``True``.

        This also prevents cleanup of the temporary build/dist directories, so
        you can examine the build artifacts.
    """
    # Obtain list of archive filenames, then ensure any wheels come first
    # so their improved metadata is what PyPI sees initially (otherwise, it
    # only honors the sdist's lesser data).
    archives = list(itertools.chain.from_iterable(
        glob(os.path.join(directory, 'dist', '*.{0}'.format(extension)))
        for extension in ('whl', 'tar.gz')
    ))
    # Sign each archive in turn
    # TODO: twine has a --sign option; but the below is still nice insofar
    # as it lets us dry-run, generate for web upload when pypi's API is
    # being cranky, etc. Figure out which is better.
    if sign:
        prompt = "Please enter GPG passphrase for signing: "
        # Passphrase is fed to gpg on stdin (--passphrase-fd 0) so the user
        # only has to type it once for all archives.
        input_ = StringIO(getpass.getpass(prompt) + "\n")
        gpg_bin = find_gpg(c)
        if not gpg_bin:
            sys.exit("You need to have one of `gpg`, `gpg1` or `gpg2` installed to GPG-sign!") # noqa
        for archive in archives:
            cmd = "{0} --detach-sign -a --passphrase-fd 0 {{0}}".format(gpg_bin) # noqa
            c.run(cmd.format(archive), in_stream=input_)
            input_.seek(0) # So it can be replayed by subsequent iterations
    # Upload
    parts = ["twine", "upload"]
    if index:
        # Target a named repository from the user's pypirc instead of the
        # default index.
        parts.append("--repository {0}".format(index))
    paths = archives[:]
    if sign:
        # Also publish the detached signatures generated above.
        paths.append(os.path.join(directory, 'dist', "*.asc"))
    parts.extend(paths)
    cmd = " ".join(parts)
    if dry_run:
        print("Would publish via: {0}".format(cmd))
        print("Files that would be published:")
        c.run("ls -l {0}".format(" ".join(paths)))
    else:
        c.run(cmd)
Example #2
0
def upload(c, directory, index=None, sign=False, dry_run=False):
    """
    Upload (potentially also signing) all artifacts in ``directory``.

    :param str index:
        Custom upload index/repository name.

        By default, uses whatever the invoked ``pip`` is configured to use.
        Modify your ``pypirc`` file to add new named repositories.

    :param bool sign:
        Whether to sign the built archive(s) via GPG.

    :param bool dry_run:
        Skip actual publication step if ``True``.

        This also prevents cleanup of the temporary build/dist directories, so
        you can examine the build artifacts.
    """
    # Obtain list of archive filenames, then ensure any wheels come first
    # so their improved metadata is what PyPI sees initially (otherwise, it
    # only honors the sdist's lesser data).
    archives = list(
        itertools.chain.from_iterable(
            glob(os.path.join(directory, "dist", "*.{0}".format(extension)))
            for extension in ("whl", "tar.gz")
        )
    )
    # Sign each archive in turn
    # TODO: twine has a --sign option; but the below is still nice insofar
    # as it lets us dry-run, generate for web upload when pypi's API is
    # being cranky, etc. Figure out which is better.
    if sign:
        prompt = "Please enter GPG passphrase for signing: "
        # Passphrase is fed to gpg on stdin (--passphrase-fd 0) so the user
        # only has to type it once for all archives.
        input_ = StringIO(getpass.getpass(prompt) + "\n")
        gpg_bin = find_gpg(c)
        if not gpg_bin:
            sys.exit(
                "You need to have one of `gpg`, `gpg1` or `gpg2` "
                "installed to GPG-sign!"
            )
        for archive in archives:
            cmd = "{0} --detach-sign -a --passphrase-fd 0 {{0}}".format(
                gpg_bin
            )  # noqa
            c.run(cmd.format(archive), in_stream=input_)
            input_.seek(0)  # So it can be replayed by subsequent iterations
    # Upload
    parts = ["twine", "upload"]
    if index:
        # Target a named repository from the user's pypirc instead of the
        # default index.
        parts.append("--repository {0}".format(index))
    paths = archives[:]
    if sign:
        # Also publish the detached signatures generated above.
        paths.append(os.path.join(directory, "dist", "*.asc"))
    parts.extend(paths)
    cmd = " ".join(parts)
    if dry_run:
        print("Would publish via: {0}".format(cmd))
        print("Files that would be published:")
        c.run("ls -l {0}".format(" ".join(paths)))
    else:
        c.run(cmd)
Example #3
0
def publish(
    c,
    sdist=True,
    wheel=False,
    index=None,
    sign=False,
    dry_run=False,
    directory=None,
    dual_wheels=False,
    alt_python=None,
):
    """
    Publish code to PyPI or index of choice.

    All parameters save ``dry_run`` and ``directory`` honor config settings of
    the same name, under the ``packaging`` tree. E.g. say
    ``.configure({'packaging': {'wheel': True}})`` to force building wheel
    archives by default.

    :param bool sdist:
        Whether to upload sdists/tgzs.

    :param bool wheel:
        Whether to upload wheels (requires the ``wheel`` package from PyPI).

    :param str index:
        Custom upload index URL.

        By default, uses whatever the invoked ``pip`` is configured to use.

    :param bool sign:
        Whether to sign the built archive(s) via GPG.

    :param bool dry_run:
        Skip actual publication step if ``True``.

        This also prevents cleanup of the temporary build/dist directories, so
        you can examine the build artifacts.

    :param str directory:
        Base directory within which will live the ``dist/`` and ``build/``
        directories.

        Defaults to a temporary directory which is cleaned up after the run
        finishes.

    :param bool dual_wheels:
        When ``True``, builds individual wheels for Python 2 and Python 3.

        Useful for situations where you can't build universal wheels, but still
        want to distribute for both interpreter versions.

        Requires that you have a useful ``python3`` (or ``python2``, if you're
        on Python 3 already) binary in your ``$PATH``. Also requires that this
        other python have the ``wheel`` package installed in its
        ``site-packages``; usually this will mean the global site-packages for
        that interpreter.

        See also the ``alt_python`` argument.

    :param str alt_python:
        Path to the 'alternate' Python interpreter to use when
        ``dual_wheels=True``.

        When ``None`` (the default) will be ``python3`` or ``python2``,
        depending on the currently active interpreter.
    """
    # Don't hide by default, this step likes to be verbose most of the time.
    c.config.run.hide = False
    # Config hooks
    config = c.config.get("packaging", {})
    index = config.get("index", index)
    sign = config.get("sign", sign)
    dual_wheels = config.get("dual_wheels", dual_wheels)
    # Build, into controlled temp dir (avoids attempting to re-upload old
    # files)
    with tmpdir(skip_cleanup=dry_run, explicit=directory) as tmp:
        # Build default archives
        build(c, sdist=sdist, wheel=wheel, directory=tmp)
        # Build opposing interpreter archive, if necessary
        if dual_wheels:
            if not alt_python:
                alt_python = "python2"
                if sys.version_info[0] == 2:
                    alt_python = "python3"
            build(c, sdist=False, wheel=True, directory=tmp, python=alt_python)
        # Obtain list of archive filenames, then ensure any wheels come first
        # so their improved metadata is what PyPI sees initially (otherwise, it
        # only honors the sdist's lesser data).
        archives = list(
            itertools.chain.from_iterable(
                glob(os.path.join(tmp, "dist", "*.{0}".format(extension))) for extension in ("whl", "tar.gz")
            )
        )
        # Sign each archive in turn
        # TODO: twine has a --sign option; but the below is still nice insofar
        # as it lets us dry-run, generate for web upload when pypi's API is
        # being cranky, etc. Figure out which is better.
        if sign:
            prompt = "Please enter GPG passphrase for signing: "
            # Passphrase is fed to gpg on stdin (--passphrase-fd 0) so the
            # user only has to type it once for all archives.
            input_ = StringIO(getpass.getpass(prompt) + "\n")
            gpg_bin = find_gpg(c)
            if not gpg_bin:
                sys.exit("You need to have one of `gpg`, `gpg1` or `gpg2` installed to GPG-sign!")  # noqa
            for archive in archives:
                cmd = "{0} --detach-sign -a --passphrase-fd 0 {{0}}".format(gpg_bin)  # noqa
                c.run(cmd.format(archive), in_stream=input_)
                input_.seek(0)  # So it can be replayed by subsequent iterations
        # Upload
        parts = ["twine", "upload"]
        if index:
            # Target a named repository/index instead of the default.
            parts.append("-r {0}".format(index))
        paths = archives[:]
        if sign:
            # Only include the *.asc glob when signatures were actually
            # generated; otherwise twine would be handed a nonexistent path.
            paths.append(os.path.join(tmp, "dist", "*.asc"))
        parts.extend(paths)
        cmd = " ".join(parts)
        if dry_run:
            print("Would publish via: {0}".format(cmd))
            print("Files that would be published:")
            c.run("ls -l {0}".format(" ".join(paths)))
        else:
            c.run(cmd)
Example #4
0
def publish(c, sdist=True, wheel=False, index=None, sign=False, dry_run=False):
    """
    Publish code to PyPI or index of choice.

    All parameters save ``dry_run`` honor config settings of the same name,
    under the ``packaging`` tree. E.g. say ``.configure({'packaging': {'wheel':
    True}})`` to force building wheel archives by default.

    :param bool sdist:
        Whether to upload sdists/tgzs.

    :param bool wheel:
        Whether to upload wheels (requires the ``wheel`` package from PyPI).

    :param str index:
        Custom upload index URL.

        By default, uses whatever the invoked ``pip`` is configured to use.

    :param bool sign:
        Whether to sign the built archive(s) via GPG.

    :param bool dry_run:
        Skip actual publication step if ``True``.
    """
    # Config hooks
    config = c.config.get('packaging', {})
    index = config.get('index', index)
    sign = config.get('sign', sign)
    # Build, into controlled temp dir (avoids attempting to re-upload old
    # files)
    with tmpdir() as tmp:
        # Build
        build(c, sdist=sdist, wheel=wheel, directory=tmp)
        # Obtain list of archive filenames, then ensure any wheels come first
        # so their improved metadata is what PyPI sees initially (otherwise, it
        # only honors the sdist's lesser data).
        archives = list(
            itertools.chain.from_iterable(
                glob(os.path.join(tmp, '*.{0}'.format(extension)))
                for extension in ('whl', 'tar.gz')))
        # Sign each archive in turn
        if sign:
            prompt = "Please enter GPG passphrase for signing: "
            # Passphrase is fed to gpg on stdin (--passphrase-fd 0) so the
            # user only has to type it once for all archives.
            input_ = StringIO(getpass.getpass(prompt) + "\n")
            for archive in archives:
                cmd = "gpg --detach-sign -a --passphrase-fd 0 {0}"
                c.run(cmd.format(archive), in_stream=input_)
                input_.seek(
                    0)  # So it can be replayed by subsequent iterations
        # Upload
        parts = ["twine", "upload"]
        if index:
            # Target a named repository/index instead of the default.
            parts.append("-r {0}".format(index))
        paths = archives[:]
        if sign:
            # Only include the *.asc glob when signatures were actually
            # generated; otherwise twine would be handed a nonexistent path.
            paths.append(os.path.join(tmp, "*.asc"))
        parts.extend(paths)
        cmd = " ".join(parts)
        if dry_run:
            print("Would publish via: {0}".format(cmd))
            print("Files that would be published:")
            c.run("ls -l {0}".format(" ".join(paths)))
        else:
            c.run(cmd)
Example #5
0
def publish(c, sdist=True, wheel=False, index=None, sign=False, dry_run=False):
    """
    Publish code to PyPI or index of choice.

    All parameters save ``dry_run`` honor config settings of the same name,
    under the ``packaging`` tree. E.g. say ``.configure({'packaging': {'wheel':
    True}})`` to force building wheel archives by default.

    :param bool sdist:
        Whether to upload sdists/tgzs.

    :param bool wheel:
        Whether to upload wheels (requires the ``wheel`` package from PyPI).

    :param str index:
        Custom upload index URL.

        By default, uses whatever the invoked ``pip`` is configured to use.

    :param bool sign:
        Whether to sign the built archive(s) via GPG.

    :param bool dry_run:
        Skip actual publication step if ``True``.
    """
    # Config hooks
    config = c.config.get('packaging', {})
    index = config.get('index', index)
    sign = config.get('sign', sign)
    # Build, into controlled temp dir (avoids attempting to re-upload old
    # files)
    with tmpdir() as tmp:
        # Build
        build(c, sdist=sdist, wheel=wheel, directory=tmp)
        # Obtain list of archive filenames, then ensure any wheels come first
        # so their improved metadata is what PyPI sees initially (otherwise, it
        # only honors the sdist's lesser data).
        archives = list(itertools.chain.from_iterable(
            glob(os.path.join(tmp, '*.{0}'.format(extension)))
            for extension in ('whl', 'tar.gz')
        ))
        # Sign each archive in turn
        if sign:
            prompt = "Please enter GPG passphrase for signing: "
            # Passphrase is fed to gpg on stdin (--passphrase-fd 0) so the
            # user only has to type it once for all archives.
            input_ = StringIO(getpass.getpass(prompt) + "\n")
            for archive in archives:
                cmd = "gpg --detach-sign -a --passphrase-fd 0 {0}"
                c.run(cmd.format(archive), in_stream=input_)
                input_.seek(0) # So it can be replayed by subsequent iterations
        # Upload
        parts = ["twine", "upload"]
        if index:
            # Target a named repository/index instead of the default.
            parts.append("-r {0}".format(index))
        paths = archives[:]
        if sign:
            # Only include the *.asc glob when signatures were actually
            # generated; otherwise twine would be handed a nonexistent path.
            paths.append(os.path.join(tmp, "*.asc"))
        parts.extend(paths)
        cmd = " ".join(parts)
        if dry_run:
            print("Would publish via: {0}".format(cmd))
            print("Files that would be published:")
            c.run("ls -l {0}".format(" ".join(paths)))
        else:
            c.run(cmd)
Example #6
0
def publish(c,
            sdist=True,
            wheel=False,
            index=None,
            sign=False,
            dry_run=False,
            directory=None,
            dual_wheels=False,
            alt_python=None,
            check_desc=False):
    """
    Publish code to PyPI or index of choice.

    All parameters save ``dry_run`` and ``directory`` honor config settings of
    the same name, under the ``packaging`` tree. E.g. say
    ``.configure({'packaging': {'wheel': True}})`` to force building wheel
    archives by default.

    :param bool sdist:
        Whether to upload sdists/tgzs.

    :param bool wheel:
        Whether to upload wheels (requires the ``wheel`` package from PyPI).

    :param str index:
        Custom upload index URL.

        By default, uses whatever the invoked ``pip`` is configured to use.

    :param bool sign:
        Whether to sign the built archive(s) via GPG.

    :param bool dry_run:
        Skip actual publication step if ``True``.

        This also prevents cleanup of the temporary build/dist directories, so
        you can examine the build artifacts.

    :param str directory:
        Base directory within which will live the ``dist/`` and ``build/``
        directories.

        Defaults to a temporary directory which is cleaned up after the run
        finishes.

    :param bool dual_wheels:
        When ``True``, builds individual wheels for Python 2 and Python 3.

        Useful for situations where you can't build universal wheels, but still
        want to distribute for both interpreter versions.

        Requires that you have a useful ``python3`` (or ``python2``, if you're
        on Python 3 already) binary in your ``$PATH``. Also requires that this
        other python have the ``wheel`` package installed in its
        ``site-packages``; usually this will mean the global site-packages for
        that interpreter.

        See also the ``alt_python`` argument.

    :param str alt_python:
        Path to the 'alternate' Python interpreter to use when
        ``dual_wheels=True``.

        When ``None`` (the default) will be ``python3`` or ``python2``,
        depending on the currently active interpreter.

    :param bool check_desc:
        Whether to run ``setup.py check -r -s`` (uses ``readme_renderer``)
        before trying to publish - catches long_description bugs. Default:
        ``False``.
    """
    # Don't hide by default, this step likes to be verbose most of the time.
    c.config.run.hide = False
    # Config hooks
    config = c.config.get('packaging', {})
    index = config.get('index', index)
    sign = config.get('sign', sign)
    dual_wheels = config.get('dual_wheels', dual_wheels)
    check_desc = config.get('check_desc', check_desc)
    # Initial sanity check, if needed. Will die usefully. (Or, on Python 2.6 or
    # 3.3, it will die no matter what, as they never got the fix from
    # https://bugs.python.org/issue23063 ...so we gotta skip it there.)
    ver = sys.version_info[:2]
    if check_desc and ver not in [(2, 6), (3, 3)]:
        c.run("python setup.py check -r -s")
    # Build, into controlled temp dir (avoids attempting to re-upload old
    # files)
    with tmpdir(skip_cleanup=dry_run, explicit=directory) as tmp:
        # Build default archives
        build(c, sdist=sdist, wheel=wheel, directory=tmp)
        # Build opposing interpreter archive, if necessary
        if dual_wheels:
            if not alt_python:
                alt_python = 'python2'
                if sys.version_info[0] == 2:
                    alt_python = 'python3'
            build(c, sdist=False, wheel=True, directory=tmp, python=alt_python)
        # Obtain list of archive filenames, then ensure any wheels come first
        # so their improved metadata is what PyPI sees initially (otherwise, it
        # only honors the sdist's lesser data).
        archives = list(
            itertools.chain.from_iterable(
                glob(os.path.join(tmp, 'dist', '*.{0}'.format(extension)))
                for extension in ('whl', 'tar.gz')))
        # Sign each archive in turn
        # TODO: twine has a --sign option; but the below is still nice insofar
        # as it lets us dry-run, generate for web upload when pypi's API is
        # being cranky, etc. Figure out which is better.
        if sign:
            prompt = "Please enter GPG passphrase for signing: "
            # Passphrase is fed to gpg on stdin (--passphrase-fd 0) so the
            # user only has to type it once for all archives.
            input_ = StringIO(getpass.getpass(prompt) + "\n")
            gpg_bin = find_gpg(c)
            if not gpg_bin:
                sys.exit(
                    "You need to have one of `gpg`, `gpg1` or `gpg2` installed to GPG-sign!"
                )  # noqa
            for archive in archives:
                cmd = "{0} --detach-sign -a --passphrase-fd 0 {{0}}".format(
                    gpg_bin)  # noqa
                c.run(cmd.format(archive), in_stream=input_)
                input_.seek(
                    0)  # So it can be replayed by subsequent iterations
        # Upload
        parts = ["twine", "upload"]
        if index:
            # Target a named repository/index instead of the default.
            parts.append("-r {0}".format(index))
        paths = archives[:]
        if sign:
            # Also publish the detached signatures generated above.
            paths.append(os.path.join(tmp, 'dist', "*.asc"))
        parts.extend(paths)
        cmd = " ".join(parts)
        if dry_run:
            print("Would publish via: {0}".format(cmd))
            print("Files that would be published:")
            c.run("ls -l {0}".format(" ".join(paths)))
        else:
            c.run(cmd)