Example #1
        def wrapper(*args, **kwargs):
            args = list(args)
            pty, os = args.pop(), args.pop()
            # Don't actually fork, but pretend we did & that main thread is
            # also the child (pid 0) to trigger execv call; & give 'parent fd'
            # of 1 (stdout).
            pty.fork.return_value = 0, 1
            # We don't really need to care about waiting since we're not truly
            # forking; just return dummy values (normally fed to WEXITSTATUS,
            # which we mock anyway).
            os.waitpid.return_value = None, None
            os.WEXITSTATUS.return_value = exit
            # If requested, mock isatty to fake out pty detection
            if isatty is not None:
                os.isatty.return_value = isatty
            out_file = StringIO(out)
            err_file = StringIO(err)

            def fakeread(fileno, count):
                fd = {1: out_file, 2: err_file}[fileno]
                return fd.read(count)

            os.read.side_effect = fakeread
            f(*args, **kwargs)
            # Sanity checks to make sure the stuff we mocked actually got run!
            pty.fork.assert_called_with()
            for name in ('execv', 'waitpid', 'WEXITSTATUS'):
                assert getattr(os, name).called
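
The wrapper above fakes an entire pty fork/exec cycle; its core trick is routing the mocked os.read through StringIO buffers keyed by file descriptor. A minimal, self-contained sketch of just that trick (names and data are illustrative, not from the original suite; note the real os.read returns bytes, while these Python 2-era tests traffic in str):

import os
from io import StringIO
from unittest import mock

out_file = StringIO("fake stdout")
err_file = StringIO("fake stderr")

def fakeread(fileno, count):
    # fd 1 -> fake stdout buffer, fd 2 -> fake stderr buffer
    return {1: out_file, 2: err_file}[fileno].read(count)

with mock.patch("os.read", side_effect=fakeread):
    assert os.read(1, 4) == "fake"
    assert os.read(2, 100) == "fake stderr"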
Example #2
 def writes_remote_streams_to_local_streams(self, remote):
     remote.expect(out=b"hello yes this is dog")
     c = _Connection('host')
     r = Remote(context=c)
     fakeout = StringIO()
     r.run(CMD, out_stream=fakeout)
     assert fakeout.getvalue() == "hello yes this is dog"
Example #3
def upload(c, directory, index=None, sign=False, dry_run=False):
    """
    Upload (potentially also signing) all artifacts in ``directory``.

    :param str index:
        Custom upload index/repository name.

        By default, uses whatever the invoked ``pip`` is configured to use.
        Modify your ``pypirc`` file to add new named repositories.

    :param bool sign:
        Whether to sign the built archive(s) via GPG.

    :param bool dry_run:
        Skip actual publication step if ``True``.

        This also prevents cleanup of the temporary build/dist directories, so
        you can examine the build artifacts.
    """
    # Obtain list of archive filenames, then ensure any wheels come first
    # so their improved metadata is what PyPI sees initially (otherwise, it
    # only honors the sdist's lesser data).
    archives = list(itertools.chain.from_iterable(
        glob(os.path.join(directory, 'dist', '*.{0}'.format(extension)))
        for extension in ('whl', 'tar.gz')
    ))
    # Sign each archive in turn
    # TODO: twine has a --sign option; but the below is still nice insofar
    # as it lets us dry-run, generate for web upload when pypi's API is
    # being cranky, etc. Figure out which is better.
    if sign:
        prompt = "Please enter GPG passphrase for signing: "
        input_ = StringIO(getpass.getpass(prompt) + "\n")
        gpg_bin = find_gpg(c)
        if not gpg_bin:
            sys.exit("You need to have one of `gpg`, `gpg1` or `gpg2` installed to GPG-sign!") # noqa
        for archive in archives:
            cmd = "{0} --detach-sign -a --passphrase-fd 0 {{0}}".format(gpg_bin) # noqa
            c.run(cmd.format(archive), in_stream=input_)
            input_.seek(0) # So it can be replayed by subsequent iterations
    # Upload
    parts = ["twine", "upload"]
    if index:
        parts.append("--repository {0}".format(index))
    paths = archives[:]
    if sign:
        paths.append(os.path.join(directory, 'dist', "*.asc"))
    parts.extend(paths)
    cmd = " ".join(parts)
    if dry_run:
        print("Would publish via: {0}".format(cmd))
        print("Files that would be published:")
        c.run("ls -l {0}".format(" ".join(paths)))
    else:
        c.run(cmd)
Example #4
 def name_attribute_present_appends_like_basename(
         self, sftp_objs):
     xfer, sftp = sftp_objs
     sftp.stat.return_value.st_mode = 0o41777
     local = StringIO("sup\n")
     local.name = "sup.txt"
     xfer.put(local, remote="/dir/path")
     sftp.putfo.assert_called_with(
         fl=local, remotepath="/dir/path/sup.txt")
Example #5
def _runner(out='', err='', **kwargs):
    klass = kwargs.pop('klass', _Dummy)
    runner = klass(Context(config=Config(overrides=kwargs)))
    if 'exits' in kwargs:
        runner.returncode = Mock(return_value=kwargs.pop('exits'))
    out_file = StringIO(out)
    err_file = StringIO(err)
    runner.read_stdout = out_file.read
    runner.read_stderr = err_file.read
    return runner
Example #6
 def subclasses_can_override_input_sleep(self):
     class MyRunner(_Dummy):
         input_sleep = 0.007
     with patch('invoke.runners.time') as mock_time:
         MyRunner(Context()).run(
             _,
             in_stream=StringIO("foo"),
             out_stream=StringIO(), # null output to not pollute tests
         )
     eq_(mock_time.sleep.call_args_list, [call(0.007)] * 3)
Example #7
 def can_be_overridden(self):
     klass = self._mock_stdin_writer()
     in_stream = StringIO("Hey, listen!")
     self._runner(klass=klass).run(
         _,
         in_stream=in_stream,
         out_stream=StringIO(),
     )
     # stdin mirroring occurs char-by-char
     calls = list(map(lambda x: call(x), "Hey, listen!"))
     klass.write_proc_stdin.assert_has_calls(calls, any_order=False)
Example #8
 def name_attribute_present_appends_like_basename(
     self, sftp_objs
 ):
     xfer, sftp = sftp_objs
     sftp.stat.return_value.st_mode = 0o41777
     local = StringIO("sup\n")
     local.name = "sup.txt"
     xfer.put(local, remote="/dir/path")
     sftp.putfo.assert_called_with(
         fl=local, remotepath="/dir/path/sup.txt"
     )
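
For context: the behavior under test is that a file-like object's name attribute supplies the remote basename when the target is a directory. A hedged sketch of the same call against a real host (hostname and paths are placeholders; assumes Fabric 2.x and that /dir/path exists remotely):

from io import StringIO
from fabric import Connection

fileobj = StringIO("sup\n")
fileobj.name = "sup.txt"  # basename appended to the remote directory
Connection("example-host").put(fileobj, remote="/dir/path")
# -> uploads the buffer's contents to /dir/path/sup.txt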
Example #9
def _runner(out='', err='', **kwargs):
    klass = kwargs.pop('klass', Dummy)
    runner = klass(Context(config=Config(overrides=kwargs)))
    if 'exits' in kwargs:
        runner.returncode = Mock(return_value=kwargs.pop('exits'))
    out_file = StringIO(out)
    err_file = StringIO(err)
    def out_reader(count):
        return out_file.read(count)
    def err_reader(count):
        return err_file.read(count)
    runner.stdout_reader = lambda: out_reader
    runner.stderr_reader = lambda: err_reader
    return runner
Example #10
        def _test_mirroring(self, expect_mirroring, **kwargs):
            # Setup
            fake_in = "I'm typing!"
            output = Mock()
            input_ = StringIO(fake_in)
            input_is_pty = kwargs.pop('in_pty', None)

            class MyRunner(_Dummy):
                def should_echo_stdin(self, input_, output):
                    # Fake result of isatty() test here and only here; if we do
                    # this farther up, it will affect stuff trying to run
                    # termios & such, which is harder to mock successfully.
                    if input_is_pty is not None:
                        input_.isatty = lambda: input_is_pty
                    return super(MyRunner,
                                 self).should_echo_stdin(input_, output)

            # Execute basic command with given parameters
            self._run(_,
                      klass=MyRunner,
                      in_stream=input_,
                      out_stream=output,
                      **kwargs)
            # Examine mocked output stream to see if it was mirrored to
            if expect_mirroring:
                eq_(output.write.call_args_list,
                    list(map(lambda x: call(x), fake_in)))
                eq_(len(output.flush.call_args_list), len(fake_in))
            # Or not mirrored to
            else:
                eq_(output.write.call_args_list, [])
Example #11
 def defaults_to_sys_stdin(self):
     # Execute w/ runner class that has a mocked stdin_writer
     klass = self._mock_stdin_writer()
     self._runner(klass=klass).run(_, out_stream=StringIO())
     # Check that mocked writer was called w/ the data from our patched
     # sys.stdin (one char at a time)
     calls = list(map(lambda x: call(x), "Text!"))
     klass.write_proc_stdin.assert_has_calls(calls, any_order=False)
Example #12
 def _get_to_stringio(self, sftp_objs):
     transfer, client = sftp_objs
     fd = StringIO()
     result = transfer.get("file", local=fd)
     # Note: getfo, not get
     client.getfo.assert_called_with(remotepath="/remote/file",
                                     fl=fd)
     return result, fd
Example #13
 def _put_from_stringio(self, sftp_objs):
     transfer, client = sftp_objs
     fd = StringIO()
     result = transfer.put(fd, remote="file")
     # Note: putfo, not put
     client.putfo.assert_called_with(remotepath="/remote/file",
                                     fl=fd)
     return result, fd
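
Both helpers delegate to paramiko's SFTPClient.putfo/getfo, which accept arbitrary file-like objects. A rough standalone sketch (host and credentials are placeholders; real SFTP traffic is bytes, hence BytesIO):

from io import BytesIO
import paramiko

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect("example-host", username="user")  # placeholder credentials
sftp = client.open_sftp()
sftp.putfo(BytesIO(b"hello\n"), remotepath="/remote/file")
buf = BytesIO()
sftp.getfo("/remote/file", buf)
assert buf.getvalue() == b"hello\n"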
Example #14
 def wrapper(*args, **kwargs):
     args = list(args)
     Popen, read, sys_stdin = args.pop(), args.pop(), args.pop()
     process = Popen.return_value
     process.returncode = exit
     process.stdout.fileno.return_value = 1
     process.stderr.fileno.return_value = 2
     # If requested, mock isatty to fake out pty detection
     if isatty is not None:
         sys_stdin.isatty = Mock(return_value=isatty)
     out_file = StringIO(out)
     err_file = StringIO(err)
     def fakeread(fileno, count):
         fd = {1: out_file, 2: err_file}[fileno]
         return fd.read(count)
     read.side_effect = fakeread
     f(*args, **kwargs)
Example #15
 def defaults_to_sys_stdin(self):
     # Execute w/ runner class that has a mocked stdin_writer
     klass = self._mock_stdin_writer()
     self._runner(klass=klass).run(_, out_stream=StringIO())
     # Check that mocked writer was called w/ expected data
     # stdin mirroring occurs byte-by-byte
     calls = list(map(lambda x: call(b(x)), "Text!"))
     klass.write_stdin.assert_has_calls(calls, any_order=False)
Example #16
 def base_case(self):
     result = Connection("localhost").shell(
         in_stream=StringIO("exit\n"))
     assert result.command is None
     # Will also include any shell prompt, etc but just looking for the
     # mirrored input is most test-env-agnostic way to spot check...
     assert "exit" in result.stdout
     assert result.stderr == ""
     assert result.exited == 0
     assert result.pty is True
Example #17
 def defaults_to_sys_stdin(self):
     # Execute w/ runner class that has a mocked stdin_writer
     klass = self._mock_stdin_writer()
     self._runner(klass=klass).run(_, out_stream=StringIO())
     # Check that mocked writer was called w/ the data from our patched
     # sys.stdin.
     # NOTE: this also tests that non-fileno-bearing streams read/write
     # 1 byte at a time. See farther-down test for fileno-bearing stdin
     calls = list(map(lambda x: call(x), "Text!"))
     klass.write_proc_stdin.assert_has_calls(calls, any_order=False)
Example #18
        def wrapper(*args, **kwargs):
            args = list(args)
            pty, os, ioctl = args.pop(), args.pop(), args.pop()
            # Don't actually fork, but pretend we did & that main thread is
            # also the child (pid 0) to trigger execve call; & give 'parent fd'
            # of 1 (stdout).
            pty.fork.return_value = 0, 1
            # We don't really need to care about waiting since we're not truly
            # forking; just return dummy values (normally fed to WEXITSTATUS,
            # which we mock anyway).
            os.waitpid.return_value = None, None
            os.WEXITSTATUS.return_value = exit
            # If requested, mock isatty to fake out pty detection
            if isatty is not None:
                os.isatty.return_value = isatty
            out_file = StringIO(out)
            err_file = StringIO(err)

            def fakeread(fileno, count):
                fd = {1: out_file, 2: err_file}[fileno]
                ret = fd.read(count)
                # If asked, fake a Linux-platform trailing I/O error.
                if not ret and trailing_error:
                    raise trailing_error
                return ret

            os.read.side_effect = fakeread
            if insert_os:
                args.append(os)
            f(*args, **kwargs)
            # Short-circuit if we raised an error in fakeread()
            if trailing_error:
                return
            # Sanity checks to make sure the stuff we mocked actually got run!
            # TODO: inject our mocks back into the tests so they can make their
            # own assertions if desired
            pty.fork.assert_called_with()
            # Test the 2nd call to ioctl; the 1st call is doing TIOCGWINSZ
            eq_(ioctl.call_args_list[1][0][1], termios.TIOCSWINSZ)
            if not skip_asserts:
                for name in ('execve', 'waitpid', 'WEXITSTATUS'):
                    assert getattr(os, name).called
Example #19
 def exceptions_get_logged(self, mock_debug):
     # Make write_stdin asplode
     klass = self._mock_stdin_writer()
     klass.write_stdin.side_effect = OhNoz("oh god why")
     # Execute with some stdin to trigger that asplode (but skip the
     # actual bubbled-up raising of it so we can check things out)
     try:
         stdin = StringIO("non-empty")
         self._runner(klass=klass).run(_, in_stream=stdin)
     except ThreadException:
         pass
     # Assert debug() was called w/ expected format
     # TODO: make the debug call a method on IOThread, then make thread
     # class configurable somewhere in Runner, and pass in a customized
     # IOThread that has a Mock for that method?
     mock_debug.assert_called_with("Encountered exception OhNoz('oh god why',) in IO thread for 'handle_stdin'") # noqa
Example #20
    class input_stream_handling:
        # NOTE: actual autoresponder tests are elsewhere. These just test that
        # stdin works normally & can be overridden.
        @patch('invoke.runners.sys.stdin', StringIO("Text!"))
        def defaults_to_sys_stdin(self):
            # Execute w/ runner class that has a mocked stdin_writer
            klass = self._mock_stdin_writer()
            self._runner(klass=klass).run(_, out_stream=StringIO())
            # Check that mocked writer was called w/ the data from our patched
            # sys.stdin (one char at a time)
            calls = list(map(lambda x: call(x), "Text!"))
            klass.write_proc_stdin.assert_has_calls(calls, any_order=False)

        def can_be_overridden(self):
            klass = self._mock_stdin_writer()
            in_stream = StringIO("Hey, listen!")
            self._runner(klass=klass).run(
                _,
                in_stream=in_stream,
                out_stream=StringIO(),
            )
            # stdin mirroring occurs char-by-char
            calls = list(map(lambda x: call(x), "Hey, listen!"))
            klass.write_proc_stdin.assert_has_calls(calls, any_order=False)

        @patch('invoke.util.debug')
        def exceptions_get_logged(self, mock_debug):
            # Make write_proc_stdin asplode
            klass = self._mock_stdin_writer()
            klass.write_proc_stdin.side_effect = OhNoz("oh god why")
            # Execute with some stdin to trigger that asplode (but skip the
            # actual bubbled-up raising of it so we can check things out)
            try:
                stdin = StringIO("non-empty")
                self._runner(klass=klass).run(_, in_stream=stdin)
            except ThreadException:
                pass
            # Assert debug() was called w/ expected format
            # TODO: make the debug call a method on ExceptionHandlingThread,
            # then make thread class configurable somewhere in Runner, and pass
            # in a customized ExceptionHandlingThread that has a Mock for that
            # method?
            mock_debug.assert_called_with(
                "Encountered exception OhNoz('oh god why',) in thread for 'handle_stdin'"
            )  # noqa
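
The @patch('invoke.runners.sys.stdin', StringIO("Text!")) decorator above is a general-purpose trick: any code that reads sys.stdin can be fed canned input. A standalone illustration, independent of invoke's internals:

from io import StringIO
from unittest import mock

with mock.patch("sys.stdin", StringIO("Text!\n")):
    # input() falls back to sys.stdin.readline() when stdin is not a tty
    assert input() == "Text!"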
Example #21
    def open_gateway(self):
        """
        Obtain a socket-like object from `gateway`.

        :returns:
            A ``direct-tcpip`` `paramiko.channel.Channel`, if `gateway` was a
            `.Connection`; or a `~paramiko.proxy.ProxyCommand`, if `gateway`
            was a string.

        .. versionadded:: 2.0
        """
        # ProxyCommand is faster to set up, so do it first.
        if isinstance(self.gateway, string_types):
            # Leverage a dummy SSHConfig to ensure %h/%p/etc are parsed.
            # TODO: use real SSH config once loading one properly is
            # implemented.
            ssh_conf = SSHConfig()
            dummy = "Host {}\n    ProxyCommand {}"
            ssh_conf.parse(StringIO(dummy.format(self.host, self.gateway)))
            return ProxyCommand(ssh_conf.lookup(self.host)["proxycommand"])
        # Handle inner-Connection gateway type here.
        # TODO: logging
        self.gateway.open()
        # TODO: expose the opened channel itself as an attribute? (another
        # possible argument for separating the two gateway types...) e.g. if
        # someone wanted to piggyback on it for other same-interpreter socket
        # needs...
        # TODO: and the inverse? allow users to supply their own socket/like
        # object they got via $WHEREEVER?
        # TODO: how best to expose timeout param? reuse general connection
        # timeout from config?
        return self.gateway.transport.open_channel(
            kind="direct-tcpip",
            dest_addr=(self.host, int(self.port)),
            # NOTE: src_addr needs to be 'empty but not None' values to
            # correctly encode into a network message. Theoretically Paramiko
            # could auto-interpret None sometime & save us the trouble.
            src_addr=("", 0),
        )
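
The dummy-config trick works because paramiko's SSHConfig.parse accepts any file-like object, so a StringIO is enough to get %h/%p token expansion without writing a real config file. A hedged sketch with illustrative values:

from io import StringIO
from paramiko import SSHConfig

host, gateway = "example.com", "ssh -W %h:%p jumpbox"
conf = SSHConfig()
conf.parse(StringIO("Host {}\n    ProxyCommand {}".format(host, gateway)))
# lookup() expands the tokens; port defaults to 22 when unspecified,
# yielding something like "ssh -W example.com:22 jumpbox"
print(conf.lookup(host)["proxycommand"])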
Example #22
 def out_can_be_overridden(self):
     "out_stream can be overridden"
     out = StringIO()
     self._runner(out="sup").run(_, out_stream=out)
     eq_(out.getvalue(), "sup")
     eq_(sys.stdout.getvalue(), "")
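
The same override works outside the test harness: invoke's top-level run() accepts any file-like out_stream, capturing output instead of only mirroring it to the terminal. A quick sketch:

from io import StringIO
from invoke import run

buf = StringIO()
run("echo sup", out_stream=buf)
assert "sup" in buf.getvalue()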
Example #23
 def err_can_be_overridden(self):
     "err_stream can be overridden"
     err = StringIO()
     self._runner(err="sup").run(_, err_stream=err)
     eq_(err.getvalue(), "sup")
     eq_(sys.stderr.getvalue(), "")
Example #24
 def setcbreak_not_called_on_non_tty_stdins(self, mock_tty):
     self._run(_, in_stream=StringIO())
     eq_(mock_tty.setcbreak.call_args_list, [])
Example #25
 def remote_cant_be_empty_if_local_file_like(self, transfer):
     transfer.put(StringIO())
Example #26
 def no_name_attribute_raises_ValueError(self, sftp_objs):
     xfer, sftp = sftp_objs
     sftp.stat.return_value.st_mode = 0o41777
     local = StringIO("sup\n")
     xfer.put(local, remote="/dir/path")
Example #27
def publish(c,
            sdist=True,
            wheel=False,
            index=None,
            sign=False,
            dry_run=False,
            directory=None,
            dual_wheels=False,
            alt_python=None,
            check_desc=False):
    """
    Publish code to PyPI or index of choice.

    All parameters save ``dry_run`` and ``directory`` honor config settings of
    the same name, under the ``packaging`` tree. E.g. say
    ``.configure({'packaging': {'wheel': True}})`` to force building wheel
    archives by default.

    :param bool sdist:
        Whether to upload sdists/tgzs.

    :param bool wheel:
        Whether to upload wheels (requires the ``wheel`` package from PyPI).

    :param str index:
        Custom upload index URL.

        By default, uses whatever the invoked ``pip`` is configured to use.

    :param bool sign:
        Whether to sign the built archive(s) via GPG.

    :param bool dry_run:
        Skip actual publication step if ``True``.

        This also prevents cleanup of the temporary build/dist directories, so
        you can examine the build artifacts.

    :param str directory:
        Base directory within which will live the ``dist/`` and ``build/``
        directories.

        Defaults to a temporary directory which is cleaned up after the run
        finishes.

    :param bool dual_wheels:
        When ``True``, builds individual wheels for Python 2 and Python 3.

        Useful for situations where you can't build universal wheels, but still
        want to distribute for both interpreter versions.

        Requires that you have a useful ``python3`` (or ``python2``, if you're
        on Python 3 already) binary in your ``$PATH``. Also requires that this
        other python have the ``wheel`` package installed in its
        ``site-packages``; usually this will mean the global site-packages for
        that interpreter.

        See also the ``alt_python`` argument.

    :param str alt_python:
        Path to the 'alternate' Python interpreter to use when
        ``dual_wheels=True``.

        When ``None`` (the default) will be ``python3`` or ``python2``,
        depending on the currently active interpreter.

    :param bool check_desc:
        Whether to run ``setup.py check -r -s`` (uses ``readme_renderer``)
        before trying to publish - catches long_description bugs. Default:
        ``False``.
    """
    # Don't hide by default, this step likes to be verbose most of the time.
    c.config.run.hide = False
    # Config hooks
    config = c.config.get('packaging', {})
    index = config.get('index', index)
    sign = config.get('sign', sign)
    dual_wheels = config.get('dual_wheels', dual_wheels)
    check_desc = config.get('check_desc', check_desc)
    # Initial sanity check, if needed. Will die usefully. (Or, on Python 2.6 or
    # 3.3, it will die no matter what, as they never got the fix from
    # https://bugs.python.org/issue23063 ...so we gotta skip it there.)
    ver = sys.version_info[:2]
    if check_desc and ver not in [(2, 6), (3, 3)]:
        c.run("python setup.py check -r -s")
    # Build, into controlled temp dir (avoids attempting to re-upload old
    # files)
    with tmpdir(skip_cleanup=dry_run, explicit=directory) as tmp:
        # Build default archives
        build(c, sdist=sdist, wheel=wheel, directory=tmp)
        # Build opposing interpreter archive, if necessary
        if dual_wheels:
            if not alt_python:
                alt_python = 'python2'
                if sys.version_info[0] == 2:
                    alt_python = 'python3'
            build(c, sdist=False, wheel=True, directory=tmp, python=alt_python)
        # Obtain list of archive filenames, then ensure any wheels come first
        # so their improved metadata is what PyPI sees initially (otherwise, it
        # only honors the sdist's lesser data).
        archives = list(
            itertools.chain.from_iterable(
                glob(os.path.join(tmp, 'dist', '*.{0}'.format(extension)))
                for extension in ('whl', 'tar.gz')))
        # Sign each archive in turn
        # TODO: twine has a --sign option; but the below is still nice insofar
        # as it lets us dry-run, generate for web upload when pypi's API is
        # being cranky, etc. Figure out which is better.
        if sign:
            prompt = "Please enter GPG passphrase for signing: "
            input_ = StringIO(getpass.getpass(prompt) + "\n")
            gpg_bin = find_gpg(c)
            if not gpg_bin:
                sys.exit(
                    "You need to have one of `gpg`, `gpg1` or `gpg2` installed to GPG-sign!"
                )  # noqa
            for archive in archives:
                cmd = "{0} --detach-sign -a --passphrase-fd 0 {{0}}".format(
                    gpg_bin)  # noqa
                c.run(cmd.format(archive), in_stream=input_)
                input_.seek(0)  # So it can be replayed by subsequent iterations
        # Upload
        parts = ["twine", "upload"]
        if index:
            parts.append("-r {0}".format(index))
        paths = archives[:]
        if sign:
            paths.append(os.path.join(tmp, 'dist', "*.asc"))
        parts.extend(paths)
        cmd = " ".join(parts)
        if dry_run:
            print("Would publish via: {0}".format(cmd))
            print("Files that would be published:")
            c.run("ls -l {0}".format(" ".join(paths)))
        else:
            c.run(cmd)
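
The signing loop relies on c.run() accepting a file-like in_stream, so a passphrase typed once at the prompt can be replayed as stdin for each gpg --passphrase-fd 0 invocation. A minimal, hedged sketch of the same pattern (archive path is illustrative; requires a gpg binary):

import getpass
from io import StringIO
from invoke import run

passphrase = StringIO(getpass.getpass("GPG passphrase: ") + "\n")
for archive in ["dist/pkg-1.0.tar.gz"]:  # illustrative archive list
    run("gpg --detach-sign -a --passphrase-fd 0 {0}".format(archive),
        in_stream=passphrase)
    passphrase.seek(0)  # rewind so the next archive can replay it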
Example #28
def publish(c, sdist=True, wheel=False, index=None, sign=False, dry_run=False):
    """
    Publish code to PyPI or index of choice.

    All parameters save ``dry_run`` honor config settings of the same name,
    under the ``packaging`` tree. E.g. say ``.configure({'packaging': {'wheel':
    True}})`` to force building wheel archives by default.

    :param bool sdist:
        Whether to upload sdists/tgzs.

    :param bool wheel:
        Whether to upload wheels (requires the ``wheel`` package from PyPI).

    :param str index:
        Custom upload index URL.

        By default, uses whatever the invoked ``pip`` is configured to use.

    :param bool sign:
        Whether to sign the built archive(s) via GPG.

    :param bool dry_run:
        Skip actual publication step if ``True``.
    """
    # Config hooks
    config = c.config.get('packaging', {})
    index = config.get('index', index)
    sign = config.get('sign', sign)
    # Build, into controlled temp dir (avoids attempting to re-upload old
    # files)
    with tmpdir() as tmp:
        # Build
        build(c, sdist=sdist, wheel=wheel, directory=tmp)
        # Obtain list of archive filenames, then ensure any wheels come first
        # so their improved metadata is what PyPI sees initially (otherwise, it
        # only honors the sdist's lesser data).
        archives = list(
            itertools.chain.from_iterable(
                glob(os.path.join(tmp, '*.{0}'.format(extension)))
                for extension in ('whl', 'tar.gz')))
        # Sign each archive in turn
        if sign:
            prompt = "Please enter GPG passphrase for signing: "
            input_ = StringIO(getpass.getpass(prompt) + "\n")
            for archive in archives:
                cmd = "gpg --detach-sign -a --passphrase-fd 0 {0}"
                c.run(cmd.format(archive), in_stream=input_)
                input_.seek(0)  # So it can be replayed by subsequent iterations
        # Upload
        parts = ["twine", "upload"]
        if index:
            parts.append("-r {0}".format(index))
        paths = archives + [os.path.join(tmp, "*.asc")]
        parts.extend(paths)
        cmd = " ".join(parts)
        if dry_run:
            print("Would publish via: {0}".format(cmd))
            print("Files that would be published:")
            c.run("ls -l {0}".format(" ".join(paths)))
        else:
            c.run(cmd)
Example #29
def publish(c, sdist=True, wheel=False, index=None, sign=False, dry_run=False):
    """
    Publish code to PyPI or index of choice.

    All parameters save ``dry_run`` honor config settings of the same name,
    under the ``packaging`` tree. E.g. say ``.configure({'packaging': {'wheel':
    True}})`` to force building wheel archives by default.

    :param bool sdist:
        Whether to upload sdists/tgzs.

    :param bool wheel:
        Whether to upload wheels (requires the ``wheel`` package from PyPI).

    :param str index:
        Custom upload index URL.

        By default, uses whatever the invoked ``pip`` is configured to use.

    :param bool sign:
        Whether to sign the built archive(s) via GPG.

    :param bool dry_run:
        Skip actual publication step if ``True``.
    """
    # Config hooks
    config = c.config.get('packaging', {})
    index = config.get('index', index)
    sign = config.get('sign', sign)
    # Build, into controlled temp dir (avoids attempting to re-upload old
    # files)
    with tmpdir() as tmp:
        # Build
        build(c, sdist=sdist, wheel=wheel, directory=tmp)
        # Obtain list of archive filenames, then ensure any wheels come first
        # so their improved metadata is what PyPI sees initially (otherwise, it
        # only honors the sdist's lesser data).
        archives = list(itertools.chain.from_iterable(
            glob(os.path.join(tmp, '*.{0}'.format(extension)))
            for extension in ('whl', 'tar.gz')
        ))
        # Sign each archive in turn
        if sign:
            prompt = "Please enter GPG passphrase for signing: "
            input_ = StringIO(getpass.getpass(prompt) + "\n")
            for archive in archives:
                cmd = "gpg --detach-sign -a --passphrase-fd 0 {0}"
                c.run(cmd.format(archive), in_stream=input_)
                input_.seek(0) # So it can be replayed by subsequent iterations
        # Upload
        parts = ["twine", "upload"]
        if index:
            parts.append("-r {0}".format(index))
        paths = archives + [os.path.join(tmp, "*.asc")]
        parts.extend(paths)
        cmd = " ".join(parts)
        if dry_run:
            print("Would publish via: {0}".format(cmd))
            print("Files that would be published:")
            c.run("ls -l {0}".format(" ".join(paths)))
        else:
            c.run(cmd)
Example #30
 def pty_out_can_be_overridden(self):
     out = StringIO()
     self._runner(out="yo").run(_, pty=True, out_stream=out)
     eq_(out.getvalue(), "yo")
     eq_(sys.stdout.getvalue(), "")
Example #31
 def writes_remote_streams_to_local_streams(self, remote):
     remote.expect(out=b"hello yes this is dog")
     fakeout = StringIO()
     _runner().run(CMD, out_stream=fakeout)
     assert fakeout.getvalue() == "hello yes this is dog"
Example #32
def publish(
    c,
    sdist=True,
    wheel=False,
    index=None,
    sign=False,
    dry_run=False,
    directory=None,
    dual_wheels=False,
    alt_python=None,
):
    """
    Publish code to PyPI or index of choice.

    All parameters save ``dry_run`` and ``directory`` honor config settings of
    the same name, under the ``packaging`` tree. E.g. say
    ``.configure({'packaging': {'wheel': True}})`` to force building wheel
    archives by default.

    :param bool sdist:
        Whether to upload sdists/tgzs.

    :param bool wheel:
        Whether to upload wheels (requires the ``wheel`` package from PyPI).

    :param str index:
        Custom upload index URL.

        By default, uses whatever the invoked ``pip`` is configured to use.

    :param bool sign:
        Whether to sign the built archive(s) via GPG.

    :param bool dry_run:
        Skip actual publication step if ``True``.

        This also prevents cleanup of the temporary build/dist directories, so
        you can examine the build artifacts.

    :param str directory:
        Base directory within which will live the ``dist/`` and ``build/``
        directories.

        Defaults to a temporary directory which is cleaned up after the run
        finishes.

    :param bool dual_wheels:
        When ``True``, builds individual wheels for Python 2 and Python 3.

        Useful for situations where you can't build universal wheels, but still
        want to distribute for both interpreter versions.

        Requires that you have a useful ``python3`` (or ``python2``, if you're
        on Python 3 already) binary in your ``$PATH``. Also requires that this
        other python have the ``wheel`` package installed in its
        ``site-packages``; usually this will mean the global site-packages for
        that interpreter.

        See also the ``alt_python`` argument.

    :param str alt_python:
        Path to the 'alternate' Python interpreter to use when
        ``dual_wheels=True``.

        When ``None`` (the default) will be ``python3`` or ``python2``,
        depending on the currently active interpreter.
    """
    # Don't hide by default, this step likes to be verbose most of the time.
    c.config.run.hide = False
    # Config hooks
    config = c.config.get("packaging", {})
    index = config.get("index", index)
    sign = config.get("sign", sign)
    dual_wheels = config.get("dual_wheels", dual_wheels)
    # Build, into controlled temp dir (avoids attempting to re-upload old
    # files)
    with tmpdir(skip_cleanup=dry_run, explicit=directory) as tmp:
        # Build default archives
        build(c, sdist=sdist, wheel=wheel, directory=tmp)
        # Build opposing interpreter archive, if necessary
        if dual_wheels:
            if not alt_python:
                alt_python = "python2"
                if sys.version_info[0] == 2:
                    alt_python = "python3"
            build(c, sdist=False, wheel=True, directory=tmp, python=alt_python)
        # Obtain list of archive filenames, then ensure any wheels come first
        # so their improved metadata is what PyPI sees initially (otherwise, it
        # only honors the sdist's lesser data).
        archives = list(
            itertools.chain.from_iterable(
                glob(os.path.join(tmp, "dist", "*.{0}".format(extension))) for extension in ("whl", "tar.gz")
            )
        )
        # Sign each archive in turn
        # TODO: twine has a --sign option; but the below is still nice insofar
        # as it lets us dry-run, generate for web upload when pypi's API is
        # being cranky, etc. Figure out which is better.
        if sign:
            prompt = "Please enter GPG passphrase for signing: "
            input_ = StringIO(getpass.getpass(prompt) + "\n")
            gpg_bin = find_gpg(c)
            if not gpg_bin:
                sys.exit("You need to have one of `gpg`, `gpg1` or `gpg2` installed to GPG-sign!")  # noqa
            for archive in archives:
                cmd = "{0} --detach-sign -a --passphrase-fd 0 {{0}}".format(gpg_bin)  # noqa
                c.run(cmd.format(archive), in_stream=input_)
                input_.seek(0)  # So it can be replayed by subsequent iterations
        # Upload
        parts = ["twine", "upload"]
        if index:
            parts.append("-r {0}".format(index))
        paths = archives + [os.path.join(tmp, "dist", "*.asc")]
        parts.extend(paths)
        cmd = " ".join(parts)
        if dry_run:
            print("Would publish via: {0}".format(cmd))
            print("Files that would be published:")
            c.run("ls -l {0}".format(" ".join(paths)))
        else:
            c.run(cmd)
Example #33
def upload(c, directory, index=None, sign=False, dry_run=False):
    """
    Upload (potentially also signing) all artifacts in ``directory``.

    :param str index:
        Custom upload index/repository name.

        By default, uses whatever the invoked ``pip`` is configured to use.
        Modify your ``pypirc`` file to add new named repositories.

    :param bool sign:
        Whether to sign the built archive(s) via GPG.

    :param bool dry_run:
        Skip actual publication step if ``True``.

        This also prevents cleanup of the temporary build/dist directories, so
        you can examine the build artifacts.
    """
    # Obtain list of archive filenames, then ensure any wheels come first
    # so their improved metadata is what PyPI sees initially (otherwise, it
    # only honors the sdist's lesser data).
    archives = list(
        itertools.chain.from_iterable(
            glob(os.path.join(directory, "dist", "*.{0}".format(extension)))
            for extension in ("whl", "tar.gz")
        )
    )
    # Sign each archive in turn
    # TODO: twine has a --sign option; but the below is still nice insofar
    # as it lets us dry-run, generate for web upload when pypi's API is
    # being cranky, etc. Figure out which is better.
    if sign:
        prompt = "Please enter GPG passphrase for signing: "
        input_ = StringIO(getpass.getpass(prompt) + "\n")
        gpg_bin = find_gpg(c)
        if not gpg_bin:
            sys.exit(
                "You need to have one of `gpg`, `gpg1` or `gpg2` "
                "installed to GPG-sign!"
            )
        for archive in archives:
            cmd = "{0} --detach-sign -a --passphrase-fd 0 {{0}}".format(
                gpg_bin
            )  # noqa
            c.run(cmd.format(archive), in_stream=input_)
            input_.seek(0)  # So it can be replayed by subsequent iterations
    # Upload
    parts = ["twine", "upload"]
    if index:
        parts.append("--repository {0}".format(index))
    paths = archives[:]
    if sign:
        paths.append(os.path.join(directory, "dist", "*.asc"))
    parts.extend(paths)
    cmd = " ".join(parts)
    if dry_run:
        print("Would publish via: {0}".format(cmd))
        print("Files that would be published:")
        c.run("ls -l {0}".format(" ".join(paths)))
    else:
        c.run(cmd)
Example #34
    class input_stream_handling:
        # NOTE: actual autoresponder tests are elsewhere. These just test that
        # stdin works normally & can be overridden.
        @patch('invoke.runners.sys.stdin', StringIO("Text!"))
        def defaults_to_sys_stdin(self):
            # Execute w/ runner class that has a mocked stdin_writer
            klass = self._mock_stdin_writer()
            self._runner(klass=klass).run(_, out_stream=StringIO())
            # Check that mocked writer was called w/ the data from our patched
            # sys.stdin.
            # NOTE: this also tests that non-fileno-bearing streams read/write
            # 1 byte at a time. See farther-down test for fileno-bearing stdin
            calls = list(map(lambda x: call(x), "Text!"))
            klass.write_proc_stdin.assert_has_calls(calls, any_order=False)

        def can_be_overridden(self):
            klass = self._mock_stdin_writer()
            in_stream = StringIO("Hey, listen!")
            self._runner(klass=klass).run(
                _,
                in_stream=in_stream,
                out_stream=StringIO(),
            )
            # stdin mirroring occurs char-by-char
            calls = list(map(lambda x: call(x), "Hey, listen!"))
            klass.write_proc_stdin.assert_has_calls(calls, any_order=False)

        @patch('invoke.util.debug')
        def exceptions_get_logged(self, mock_debug):
            # Make write_proc_stdin asplode
            klass = self._mock_stdin_writer()
            klass.write_proc_stdin.side_effect = OhNoz("oh god why")
            # Execute with some stdin to trigger that asplode (but skip the
            # actual bubbled-up raising of it so we can check things out)
            try:
                stdin = StringIO("non-empty")
                self._runner(klass=klass).run(_, in_stream=stdin)
            except ThreadException:
                pass
            # Assert debug() was called w/ expected format
            # TODO: make the debug call a method on ExceptionHandlingThread,
            # then make thread class configurable somewhere in Runner, and pass
            # in a customized ExceptionHandlingThread that has a Mock for that
            # method?
            mock_debug.assert_called_with("Encountered exception OhNoz('oh god why',) in thread for 'handle_stdin'") # noqa

        @trap
        @skip_if_windows
        @patch('invoke.runners.sys.stdin')
        @patch('invoke.platform.fcntl.ioctl')
        @patch('invoke.platform.termios')
        @patch('invoke.platform.tty')
        @patch('invoke.platform.select')
        # NOTE: the no-fileno edition is handled at top of this local test
        # class, in the base case test.
        def reads_FIONREAD_bytes_from_stdin_when_fileno(
            self, select, tty, termios, ioctl, stdin
        ):
            # Set stdin up as a file-like buffer which passes has_fileno
            stdin.fileno.return_value = 17 # arbitrary
            stdin_data = list("boo!")
            def fakeread(n):
                # Why is there no slice version of pop()?
                data = stdin_data[:n]
                del stdin_data[:n]
                return ''.join(data)
            stdin.read.side_effect = fakeread
            # Ensure select() only spits back stdin one time, despite there
            # being multiple bytes to read (this at least partly fakes behavior
            # from issue #58)
            select.select.side_effect = chain(
                [([stdin], [], [])],
                repeat(([], [], [])),
            )
            # Have ioctl yield our multiple number of bytes when called with
            # FIONREAD
            def fake_ioctl(fd, cmd, buf):
                # This works since each mocked attr will still be its own mock
                # object with a distinct 'is' identity.
                if cmd is termios.FIONREAD:
                    return struct.pack('h', len(stdin_data))
            ioctl.side_effect = fake_ioctl
            # Set up our runner as one w/ mocked stdin writing (simplest way to
            # assert how the reads & writes are happening)
            klass = self._mock_stdin_writer()
            self._runner(klass=klass).run(_)
            klass.write_proc_stdin.assert_called_once_with("boo!")
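
The FIONREAD probe being faked above has a small real-world counterpart: on POSIX systems, ioctl(fd, FIONREAD) reports how many bytes are waiting on a descriptor, letting reads be sized exactly. A hedged sketch (POSIX-only; requires a real file descriptor such as an interactive stdin):

import fcntl
import struct
import sys
import termios

fd = sys.stdin.fileno()
packed = fcntl.ioctl(fd, termios.FIONREAD, struct.pack("h", 0))
nbytes = struct.unpack("h", packed)[0]  # bytes currently readable on fd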