Example 1
    def run(self, stdout_func=None, stdin_func=None, stderr_func=None):
        """Runs the process, using the provided functions for I/O.

        The function stdin_func should return strings whenever a
        character or characters become available.
        The functions stdout_func and stderr_func are called whenever
        something is printed to stdout or stderr, respectively.
        These functions are called from different threads (but not
        concurrently, because of the GIL).
        """
        if stdout_func is None and stdin_func is None and stderr_func is None:
            return self._run_stdio()

        if stderr_func is not None and self.mergeout:
            raise RuntimeError(
                "Shell command was initiated with "
                "merged stdout/stderr, but a separate stderr_func "
                "was provided to the run() method")

        # Create a thread for each input/output handle
        stdin_thread = None
        threads = []
        if stdin_func:
            stdin_thread = threads_new.Thread(target=self._stdin_thread,
                                              args=(self.hstdin,
                                                    self.piProcInfo.hProcess,
                                                    stdin_func, stdout_func))
        threads.append(
            threads_new.Thread(target=self._stdout_thread,
                               args=(self.hstdout, stdout_func)))
        if not self.mergeout:
            if stderr_func is None:
                stderr_func = stdout_func
            threads.append(
                threads_new.Thread(target=self._stdout_thread,
                                   args=(self.hstderr, stderr_func)))
        # Start the I/O threads and the process
        if ResumeThread(self.piProcInfo.hThread) == 0xFFFFFFFF:
            raise ctypes.WinError()
        if stdin_thread is not None:
            stdin_thread.start()
        for thread in threads:
            thread.start()
        # Wait for the process to complete
        if WaitForSingleObject(self.piProcInfo.hProcess, INFINITE) == \
                    WAIT_FAILED:
            raise ctypes.WinError()
        # Wait for the I/O threads to complete
        for thread in threads:
            thread.join()

        # Wait for the stdin thread to complete
        if stdin_thread is not None:
            stdin_thread.join()
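
The method above fans the child process's output out to callbacks through dedicated reader threads and only returns once both the process and the readers have finished. Below is a portable sketch of the same callback pattern built on subprocess instead of the Win32 handles; it assumes threads_new is simply an alias for the standard threading module, and the helper name is illustrative.

import subprocess
import threading as threads_new  # assumption: threads_new aliases the stdlib threading module

def run_with_callbacks(argv, stdout_func, stderr_func):
    """Run argv and hand each line of output to the matching callback."""
    proc = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    def pump(pipe, func):
        # Deliver the child's output a line at a time until EOF.
        for line in iter(pipe.readline, b""):
            func(line)
        pipe.close()

    threads = [
        threads_new.Thread(target=pump, args=(proc.stdout, stdout_func)),
        threads_new.Thread(target=pump, args=(proc.stderr, stderr_func)),
    ]
    for t in threads:
        t.start()
    proc.wait()              # wait for the process to complete
    for t in threads:
        t.join()             # then wait for the reader threads to drain the pipes
    return proc.returncode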
Example 2
def test_overload_operators_in_subthread():
    """Test we can create a model in a child thread with overloaded operators."""
    # Worker1 will start and run, while worker 2 sleeps after Model.define_operators.
    # Without thread-safety, worker2 will find that its operator definitions
    # have been removed, causing an error.
    worker1 = threads_new.Thread(target=_overload_plus, args=("+", 1))
    worker2 = threads_new.Thread(target=_overload_plus, args=("*", 3))
    worker2.start()
    worker1.start()
    worker1.join()
    worker2.join()
Example 3
def windows_shell(chan):
    import threads_new

    sys.stdout.write("Line-buffered terminal emulation. Press F6 or ^Z to send EOF.\r\n\r\n")
        
    def writeall(sock):
        while True:
            data = sock.recv(256)
            if not data:
                sys.stdout.write('\r\n*** EOF ***\r\n\r\n')
                sys.stdout.flush()
                break
            sys.stdout.write(data.decode("utf-8", "replace"))  # recv() returns bytes in Python 3
            sys.stdout.flush()
        
    writer = threads_new.Thread(target=writeall, args=(chan,))
    writer.start()
        
    try:
        while True:
            d = sys.stdin.read(1)
            if not d:
                break
            chan.send(d)
    except EOFError:
        # user hit ^Z or F6
        pass
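
The shape of this example is one background thread draining the channel while the main thread forwards keystrokes. Here is a self-contained sketch of that reader-thread idea over a local socket pair instead of an SSH channel, again assuming threads_new aliases the stdlib threading module.

import socket
import threading as threads_new  # assumption: alias of the stdlib threading module

def reader_thread_demo():
    a, b = socket.socketpair()               # stand-in for the SSH channel

    def writeall(sock):
        # Drain the socket until EOF, decoding bytes before printing.
        while True:
            data = sock.recv(256)
            if not data:
                print("*** EOF ***")
                break
            print(data.decode("utf-8", "replace"), end="", flush=True)

    reader = threads_new.Thread(target=writeall, args=(a,))
    reader.start()
    b.sendall(b"hello from the remote side\n")
    b.close()                                # EOF lets the reader thread exit
    reader.join()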
Example 4
    def _start_thread(self):
        util.debug('Queue._start_thread()')

        # Start thread which transfers data from buffer to pipe
        self._buffer.clear()
        self._thread = threads_new.Thread(
            target=Queue._feed,
            args=(self._buffer, self._notempty, self._send_bytes, self._wlock,
                  self._writer.close, self._reducers, self._ignore_epipe,
                  self._on_queue_feeder_error, self._sem),
            name='QueueFeederThread')
        self._thread.daemon = True

        util.debug('doing self._thread.start()')
        self._thread.start()
        util.debug('... done self._thread.start()')

        # On process exit we will wait for data to be flushed to pipe.
        #
        # However, if this process created the queue then all
        # processes which use the queue will be descendants of this
        # process.  Therefore waiting for the queue to be flushed
        # is pointless once all the child processes have been joined.
        created_by_this_process = (self._opid == os.getpid())
        if not self._joincancelled and not created_by_this_process:
            self._jointhread = util.Finalize(self._thread,
                                             Queue._finalize_join,
                                             [weakref.ref(self._thread)],
                                             exitpriority=-5)

        # Send sentinel to the thread queue object when garbage collected
        self._close = util.Finalize(self,
                                    Queue._finalize_close,
                                    [self._buffer, self._notempty],
                                    exitpriority=10)
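
The _start_thread method above hands a shared buffer to a daemon feeder thread and relies on finalizers for shutdown. Below is a stripped-down sketch of that buffer-plus-condition feeder with illustrative names only (not the multiprocessing internals), assuming threads_new is the stdlib threading module.

import collections
import threading as threads_new  # assumption: alias of the stdlib threading module

class TinyFeeder:
    """A daemon thread that drains a shared buffer and passes items to `send`."""
    _sentinel = object()

    def __init__(self, send):
        self._send = send
        self._buffer = collections.deque()
        self._notempty = threads_new.Condition()
        self._thread = threads_new.Thread(target=self._feed, name="TinyFeederThread")
        self._thread.daemon = True
        self._thread.start()

    def put(self, obj):
        with self._notempty:
            self._buffer.append(obj)
            self._notempty.notify()

    def close(self):
        # A sentinel in the buffer tells the feeder thread to stop.
        self.put(self._sentinel)
        self._thread.join()

    def _feed(self):
        while True:
            with self._notempty:
                while not self._buffer:
                    self._notempty.wait()
                obj = self._buffer.popleft()
            if obj is self._sentinel:
                return
            self._send(obj)

# Usage: feeder = TinyFeeder(print); feeder.put("hello"); feeder.close()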
Example 5
def test_parallel_threads():
    # Check that ReentrancyLock serializes work in parallel threads.
    #
    # The test is not fully deterministic, and may succeed falsely if
    # the timings go wrong.

    lock = ReentrancyLock("failure")

    failflag = [False]
    exceptions_raised = []

    def worker(k):
        try:
            with lock:
                assert_(not failflag[0])
                failflag[0] = True
                time.sleep(0.1 * k)
                assert_(failflag[0])
                failflag[0] = False
        except Exception:
            exceptions_raised.append(traceback.format_exc(2))

    threads = [
        threads_new.Thread(target=lambda k=k: worker(k)) for k in range(3)
    ]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

    exceptions_raised = "\n".join(exceptions_raised)
    assert_(not exceptions_raised, exceptions_raised)
Example 6
def serve(html, ip='127.0.0.1', port=8888, n_retries=50, files=None,
          ipython_warning=True, open_browser=True, http_server=None):
    """Start a server serving the given HTML, and (optionally) open a
    browser

    Parameters
    ----------
    html : string
        HTML to serve
    ip : string (default = '127.0.0.1')
        ip address at which the HTML will be served.
    port : int (default = 8888)
        the port at which to serve the HTML
    n_retries : int (default = 50)
        the number of nearby ports to search if the specified port is in use.
    files : dictionary (optional)
        dictionary of extra content to serve
    ipython_warning : bool (optional)
        if True (default), then print a warning if this is used within IPython
    open_browser : bool (optional)
        if True (default), then open a web browser to the given HTML
    http_server : class (optional)
        optionally specify an HTTPServer class to use for showing the
        figure. The default is Python's basic HTTPServer.
    """
    port = find_open_port(ip, port, n_retries)
    Handler = generate_handler(html, files)

    if http_server is None:
        srvr = server.HTTPServer((ip, port), Handler)
    else:
        srvr = http_server((ip, port), Handler)

    if ipython_warning:
        try:
            __IPYTHON__
        except NameError:
            pass
        else:
            print(IPYTHON_WARNING)

    # Start the server
    print("Serving to http://{0}:{1}/    [Ctrl-C to exit]".format(ip, port))
    sys.stdout.flush()

    if open_browser:
        # Use a thread to open a web browser pointing to the server
        b = lambda: webbrowser.open('http://{0}:{1}'.format(ip, port))
        threads_new.Thread(target=b).start()

    try:
        srvr.serve_forever()
    except (KeyboardInterrupt, SystemExit):
        print("\nstopping Server...")

    srvr.server_close()
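
Only the browser is pushed into a thread here, because serve_forever() needs to block the main thread. A minimal standard-library equivalent, without the port search and the IPython check, might look like the sketch below (assuming threads_new aliases threading; the handler is a placeholder).

import webbrowser
from http.server import BaseHTTPRequestHandler, HTTPServer
import threading as threads_new  # assumption: alias of the stdlib threading module

class HelloHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-Type", "text/html; charset=utf-8")
        self.end_headers()
        self.wfile.write(b"<h1>hello</h1>")

def serve_and_open(ip='127.0.0.1', port=8888):
    srvr = HTTPServer((ip, port), HelloHandler)
    url = 'http://{0}:{1}'.format(ip, port)
    # Open the browser from a thread so it cannot block serve_forever().
    threads_new.Thread(target=webbrowser.open, args=(url,)).start()
    try:
        srvr.serve_forever()
    except (KeyboardInterrupt, SystemExit):
        print("\nstopping Server...")
    srvr.server_close()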
Example 7
    def __init__(self,
                 cmd,
                 timeout=30,
                 maxread=2000,
                 searchwindowsize=None,
                 logfile=None,
                 cwd=None,
                 env=None,
                 encoding=None,
                 codec_errors='strict',
                 preexec_fn=None):
        super(PopenSpawn, self).__init__(timeout=timeout,
                                         maxread=maxread,
                                         searchwindowsize=searchwindowsize,
                                         logfile=logfile,
                                         encoding=encoding,
                                         codec_errors=codec_errors)

        # Note that `SpawnBase` initializes `self.crlf` to `\r\n`
        # because the default behaviour for a PTY is to convert
        # incoming LF to `\r\n` (see the `onlcr` flag and
        # https://stackoverflow.com/a/35887657/5397009). Here we set
        # it to `os.linesep` because that is what the spawned
        # application outputs by default and `popen` doesn't translate
        # anything.
        if encoding is None:
            self.crlf = os.linesep.encode("ascii")
        else:
            self.crlf = self.string_type(os.linesep)

        kwargs = dict(bufsize=0,
                      stdin=subprocess.PIPE,
                      stderr=subprocess.STDOUT,
                      stdout=subprocess.PIPE,
                      cwd=cwd,
                      preexec_fn=preexec_fn,
                      env=env)

        if sys.platform == 'win32':
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            kwargs['startupinfo'] = startupinfo
            kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP

        if isinstance(cmd, string_types) and sys.platform != 'win32':
            cmd = shlex.split(cmd, posix=os.name == 'posix')

        self.proc = subprocess.Popen(cmd, **kwargs)
        self.pid = self.proc.pid
        self.closed = False
        self._buf = self.string_type()

        self._read_queue = Queue()
        self._read_thread = threads_new.Thread(target=self._read_incoming)
        self._read_thread.daemon = True  # setDaemon() is deprecated
        self._read_thread.start()
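
The constructor ends by starting a daemon reader thread that moves the child's merged output into self._read_queue, so later reads never block on the pipe itself. Below is a reduced sketch of that producer side with illustrative names, assuming threads_new is the stdlib threading module.

import os
import subprocess
from queue import Queue
import threading as threads_new  # assumption: alias of the stdlib threading module

def spawn_with_reader(argv):
    """Start argv and return (process, queue of output chunks); None marks EOF."""
    proc = subprocess.Popen(argv, bufsize=0, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    read_queue = Queue()

    def read_incoming():
        fd = proc.stdout.fileno()
        while True:
            buf = os.read(fd, 4096)     # returns as soon as any data is available
            if not buf:
                read_queue.put(None)    # EOF marker
                return
            read_queue.put(buf)

    read_thread = threads_new.Thread(target=read_incoming)
    read_thread.daemon = True
    read_thread.start()
    return proc, read_queue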
Example 8
def startBrowser(url, server_ready):
    def run():
        server_ready.wait()
        # Wait a little bit more; there's still the chance of a race condition.
        time.sleep(1)
        webbrowser.open(url, new=2, autoraise=1)

    t = threads_new.Thread(target=run)
    t.start()
    return t
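
A possible way to drive the helper above: the caller creates the event, starts its server, and only then sets server_ready, so the browser never opens before the socket is bound. This is a sketch only; the URL is a placeholder and threads_new is assumed to alias threading.

import threading as threads_new  # assumption: alias of the stdlib threading module

server_ready = threads_new.Event()
t = startBrowser("http://127.0.0.1:8000/", server_ready)
# ... create and bind the HTTP server here ...
server_ready.set()   # unblocks the browser thread
t.join()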
Example 9
    def select(self, timeout=None):
        # If there are tasks in the current event loop,
        # don't run the input hook.
        if len(get_event_loop()._ready) > 0:
            return self.selector.select(timeout=timeout)

        ready = False
        result = None

        # Run selector in other thread.
        def run_selector() -> None:
            nonlocal ready, result
            result = self.selector.select(timeout=timeout)
            os.write(self._w, b"x")
            ready = True

        th = threads_new.Thread(target=run_selector)
        th.start()

        def input_is_ready() -> bool:
            return ready

        # Call inputhook.
        # The inputhook function is supposed to return when our selector
        # becomes ready. The inputhook can do that by registering the fd in its
        # own loop, or by checking the `input_is_ready` function regularly.
        self.inputhook(InputHookContext(self._r, input_is_ready))

        # Flush the read end of the pipe.
        try:
            # Before calling 'os.read', call select.select. This is required
            # when the gevent monkey patch has been applied. 'os.read' is never
            # monkey patched and won't be cooperative, so that would block all
            # other select() calls otherwise.
            # See: http://www.gevent.org/gevent.os.html

            # Note: On Windows, this is apparently not an issue.
            #       However, if we would ever want to add a select call, it
            #       should use `windll.kernel32.WaitForMultipleObjects`,
            #       because `select.select` can't wait for a pipe on Windows.
            if not is_windows():
                select.select([self._r], [], [], None)

            os.read(self._r, 1024)
        except OSError:
            # This happens when the window resizes and a SIGWINCH was received.
            # We get 'Error: [Errno 4] Interrupted system call'
            # Just ignore.
            pass

        # Wait for the real selector to be done.
        th.join()
        return result
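
The essential trick is the self-pipe: the worker thread writes one byte when the blocking select() returns, which gives the input hook a plain file descriptor to watch. Here is a reduced, POSIX-only sketch of that handshake (as the comments note, select.select cannot wait on a pipe on Windows; threads_new assumed to alias threading).

import os
import select
import time
import threading as threads_new  # assumption: alias of the stdlib threading module

def wait_in_thread(blocking_call):
    """Run blocking_call() in a thread and wake the caller through a pipe."""
    r, w = os.pipe()
    result = []

    def runner():
        result.append(blocking_call())
        os.write(w, b"x")            # wake whoever is select()ing on r

    th = threads_new.Thread(target=runner)
    th.start()
    select.select([r], [], [])       # the caller blocks here, not on the call itself
    os.read(r, 1)                    # flush the wake-up byte
    th.join()
    os.close(r)
    os.close(w)
    return result[0]

# e.g. wait_in_thread(lambda: time.sleep(0.1) or "ready") returns "ready"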
Example 10
    def check(caller):
        caller = CALLERS[caller]

        results = []

        count = 10

        def run():
            time.sleep(0.01)
            r = caller(lambda x: callback(x, caller), count)
            results.append(r)

        threads = [threads_new.Thread(target=run) for j in range(20)]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()

        assert_equal(results, [2.0**count] * len(threads))
Example 11
    def test_multithread(self):
        import threads_new

        # Running evaluate() from multiple threads shouldn't crash
        def work(n):
            a = arange(n)
            evaluate('a+a')

        work(10)  # warm compilation cache

        nthreads = 30
        threads = [
            threads_new.Thread(target=work, args=(1e5, ))
            for i in range(nthreads)
        ]
        for t in threads:
            t.start()
        for t in threads:
            t.join()
Example 12
    def test_threads_parallel(self):
        oks = []

        def worker():
            try:
                self.test_splu_basic()
                self._internal_test_splu_smoketest()
                self._internal_test_spilu_smoketest()
                oks.append(True)
            except Exception:
                pass

        threads = [threads_new.Thread(target=worker)
                   for k in range(20)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()

        assert_equal(len(oks), 20)
Example 13
    def _test_mtsame(self, func, *args):
        def worker(args, q):
            q.put(func(*args))

        q = queue.Queue()
        expected = func(*args)

        # Spin off a bunch of threads to call the same function simultaneously
        t = [
            threads_new.Thread(target=worker, args=(args, q))
            for i in range(self.threads)
        ]
        for x in t:
            x.start()
        for x in t:
            x.join()
        # Make sure all threads returned the correct value
        for i in range(self.threads):
            assert_array_equal(
                q.get(timeout=5), expected,
                'Function returned wrong value in multithreaded context')
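
The helper fans one call out to several threads, collects every return value through a queue.Queue, and compares each against a single-threaded reference result. The same harness outside a test class might look like the sketch below (threads_new assumed to alias threading).

import queue
import threading as threads_new  # assumption: alias of the stdlib threading module

def call_in_threads(func, args, nthreads=8):
    """Call func(*args) from nthreads threads and return all results."""
    q = queue.Queue()

    def worker():
        q.put(func(*args))

    threads = [threads_new.Thread(target=worker) for _ in range(nthreads)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    return [q.get_nowait() for _ in range(nthreads)]

# e.g. call_in_threads(pow, (2, 10)) should return [1024] * 8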
Example 14
def test_parallel_threads():
    results = []
    v0 = np.random.rand(50)

    def worker():
        x = diags([1, -2, 1], [-1, 0, 1], shape=(50, 50))
        w, v = eigs(x, k=3, v0=v0)
        results.append(w)

        w, v = eigsh(x, k=3, v0=v0)
        results.append(w)

    threads = [threads_new.Thread(target=worker) for k in range(10)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

    worker()

    for r in results:
        assert_allclose(r, results[-1])
Example 15
    def _start_queue_management_thread(self):
        if self._queue_management_thread is None:
            mp.util.debug('_start_queue_management_thread called')

            # When the executor gets garbage collected, the weakref callback
            # will wake up the queue management thread so that it can terminate
            # if there is no pending work item.
            def weakref_cb(_,
                           thread_wakeup=self._queue_management_thread_wakeup):
                mp.util.debug('Executor collected: triggering callback for'
                              ' QueueManager wakeup')
                thread_wakeup.wakeup()

            # Start the processes so that their sentinels are known.
            self._queue_management_thread = threads_new.Thread(
                target=_queue_management_worker,
                args=(weakref.ref(self, weakref_cb), self._flags,
                      self._processes, self._pending_work_items,
                      self._running_work_items, self._work_ids,
                      self._call_queue, self._result_queue,
                      self._queue_management_thread_wakeup,
                      self._processes_management_lock),
                name="QueueManagerThread")
            self._queue_management_thread.daemon = True
            self._queue_management_thread.start()

            # Register this executor in a mechanism that ensures it will wake
            # up when the interpreter is exiting.
            _threads_wakeups[self._queue_management_thread] = \
                self._queue_management_thread_wakeup

            global process_pool_executor_at_exit
            if process_pool_executor_at_exit is None:
                # Ensure that the _python_exit function will be called before
                # the multiprocessing.Queue._close finalizers which have an
                # exitpriority of 10.
                process_pool_executor_at_exit = mp.util.Finalize(
                    None, _python_exit, exitpriority=20)
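
The weakref.ref(self, weakref_cb) call is what wakes the management thread when the executor is garbage collected, without the thread itself keeping the executor alive. Here is a tiny illustration of that mechanism with hypothetical names (threads_new assumed to alias threading; the immediate collection on del is CPython reference-counting behaviour).

import weakref
import threading as threads_new  # assumption: alias of the stdlib threading module

class _ThreadWakeup:
    """Minimal stand-in for the executor's wakeup object."""
    def __init__(self):
        self._event = threads_new.Event()
    def wakeup(self):
        self._event.set()

class Executor:  # hypothetical owner object
    pass

wakeup = _ThreadWakeup()
executor = Executor()

# The second argument to weakref.ref is a callback that runs when the referent
# is collected; holding only a weak reference does not keep the executor alive.
ref = weakref.ref(executor, lambda _ref: wakeup.wakeup())

del executor                       # the callback fires here under CPython
assert wakeup._event.is_set()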
Example 16
    def __enter__(self) -> "ProgressBar":
        # Create UI Application.
        title_toolbar = ConditionalContainer(
            Window(
                FormattedTextControl(lambda: self.title),
                height=1,
                style="class:progressbar,title",
            ),
            filter=Condition(lambda: self.title is not None),
        )

        bottom_toolbar = ConditionalContainer(
            Window(
                FormattedTextControl(
                    lambda: self.bottom_toolbar, style="class:bottom-toolbar.text"
                ),
                style="class:bottom-toolbar",
                height=1,
            ),
            filter=~is_done
            & renderer_height_is_known
            & Condition(lambda: self.bottom_toolbar is not None),
        )

        def width_for_formatter(formatter: Formatter) -> AnyDimension:
            # Needs to be passed as callable (partial) to the 'width'
            # parameter, because we want to call it on every resize.
            return formatter.get_width(progress_bar=self)

        progress_controls = [
            Window(
                content=_ProgressControl(self, f),
                width=functools.partial(width_for_formatter, f),
            )
            for f in self.formatters
        ]

        self.app: Application[None] = Application(
            min_redraw_interval=0.05,
            layout=Layout(
                HSplit(
                    [
                        title_toolbar,
                        VSplit(
                            progress_controls,
                            height=lambda: D(
                                preferred=len(self.counters), max=len(self.counters)
                            ),
                        ),
                        Window(),
                        bottom_toolbar,
                    ]
                )
            ),
            style=self.style,
            key_bindings=self.key_bindings,
            refresh_interval=0.3,
            color_depth=self.color_depth,
            output=self.output,
            input=self.input,
        )

        # Run application in different thread.
        def run() -> None:
            set_event_loop(self._app_loop)
            try:
                self.app.run()
            except BaseException as e:
                traceback.print_exc()
                print(e)

        ctx: contextvars.Context = contextvars.copy_context()

        self._thread = threads_new.Thread(target=ctx.run, args=(run,))
        self._thread.start()

        # Attach WINCH signal handler in main thread.
        # (Interrupt that we receive during resize events.)
        self._has_sigwinch = hasattr(signal, "SIGWINCH") and in_main_thread()
        if self._has_sigwinch:
            self._previous_winch_handler = signal.getsignal(signal.SIGWINCH)
            self._loop.add_signal_handler(signal.SIGWINCH, self.invalidate)

        return self
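
Passing ctx.run as the thread target makes the worker thread run inside a copy of the caller's contextvars context, which a plain Thread(target=run) would not do. A self-contained sketch of that pattern, assuming threads_new aliases the stdlib threading module:

import contextvars
import threading as threads_new  # assumption: alias of the stdlib threading module

request_id: contextvars.ContextVar[str] = contextvars.ContextVar("request_id", default="-")

def run() -> None:
    # Sees "abc123" because the thread executes inside the copied context.
    print("worker sees request_id =", request_id.get())

request_id.set("abc123")
ctx: contextvars.Context = contextvars.copy_context()

thread = threads_new.Thread(target=ctx.run, args=(run,))
thread.start()
thread.join()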
Example 17
    def __init__(self, no, arglist, genobj):
        BaseTask.__init__(self, no, arglist, genobj)
        self.thread = threads_new.Thread(target=super(ThreadedTask, self).run)