Example #1
    def start(self, ctx):
        player = BigWorld.player()
        if self.user is not None and self.user != player.databaseID:
            self.old_user = player.databaseID
            self.old_playerName = player.name
            self._thread_analytics = threading.Thread(
                target=self.end, name=threading._newname('Analytics-%d'))
            self._thread_analytics.start()
        self.user = player.databaseID
        self.playerName = player.name
        self._thread_analytics = threading.Thread(
            target=self.analytics_start,
            name=threading._newname('Analytics-%d'))
        self._thread_analytics.start()
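threading._newname is a private CPython helper: it fills a "%d" template from the same process-wide counter that produces the default "Thread-N" names, so the snippet above gets sequential names like Analytics-1, Analytics-2. A minimal sketch of an equivalent approach that avoids the private API; the _analytics_name helper below is hypothetical, not part of the mod:

# Minimal sketch (not from the mod above): reproduce sequential "Analytics-N"
# names with a local counter instead of the private threading._newname helper.
import itertools
import threading

_analytics_counter = itertools.count(1)   # hypothetical module-level counter

def _analytics_name():
    return 'Analytics-%d' % next(_analytics_counter)

t = threading.Thread(target=lambda: None, name=_analytics_name())
t.start()
t.join()
print(t.name)   # Analytics-1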
Example #2
    def __init__(self,
                 group=None,
                 target=None,
                 name=None,
                 args=(),
                 kwargs=None,
                 *,
                 daemon=None,
                 number,
                 style):

        assert group is None, "group argument must be None for now"
        if kwargs is None:
            kwargs = {}
        self._target = target
        self._name = str(name or _newname())
        self._args = args
        self._kwargs = kwargs
        if daemon is not None:
            self._daemonic = daemon
        else:
            self._daemonic = current_thread().daemon
        self._ident = None
        self._tstate_lock = None
        self._started = Event()
        self._is_stopped = False
        self._initialized = True

        self._stderr = _sys.stderr

        self.number = number
        self.style = style
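This initializer copies most of threading.Thread.__init__ just to attach two extra attributes. A shorter pattern, sketched below under the assumption that the class only needs number and style on top of standard Thread behavior (StyledThread is a placeholder name), delegates to super() and lets the base class call _newname for the default name:

import threading

class StyledThread(threading.Thread):        # hypothetical name for illustration
    def __init__(self, number, style, **kwargs):
        super().__init__(**kwargs)           # base class picks a "Thread-N" default name
        self.number = number
        self.style = style

t = StyledThread(7, 'bold', target=print, args=('hello',))
t.start()
t.join()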
Example #3
    def __init__(self): # pylint:disable=super-init-not-called
        #_DummyThread_.__init__(self)

        # It'd be nice to use a pattern like "greenlet-%d", but maybe somebody out
        # there is checking thread names...
        self._name = self._Thread__name = __threading__._newname("DummyThread-%d")
        # All dummy threads in the same native thread share the same ident
        # (that of the native thread)
        self._set_ident()

        g = getcurrent()
        gid = _get_ident(g)
        __threading__._active[gid] = self
        rawlink = getattr(g, 'rawlink', None)
        if rawlink is not None:
            # raw greenlet.greenlet greenlets don't
            # have rawlink...
            rawlink(_cleanup)
        else:
            # ... so for them we use weakrefs.
            # See https://github.com/gevent/gevent/issues/918
            global _weakref
            if _weakref is None:
                _weakref = __import__('weakref')
            ref = _weakref.ref(g, _make_cleanup_id(gid))
            self.__raw_ref = ref
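gevent registers the dummy thread under the greenlet's ident in __threading__._active so that threading.current_thread() resolves to it from inside a greenlet. The stdlib behavior being imitated can be seen with a thread that the threading module did not create; a small sketch:

# Sketch of the stdlib behavior gevent mimics: a thread started behind
# threading's back is wrapped in a _DummyThread named "Dummy-N" on first lookup.
import _thread
import threading
import time

def show_current():
    current = threading.current_thread()
    print(type(current).__name__, current.name)   # e.g. "_DummyThread Dummy-1"

_thread.start_new_thread(show_current, ())
time.sleep(0.5)   # give the raw thread time to run before the script exits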
Example #4
    def __init__(self): # pylint:disable=super-init-not-called
        #_DummyThread_.__init__(self)

        # It'd be nice to use a pattern like "greenlet-%d", but there are definitely
        # third-party libraries checking thread names to detect DummyThread objects.
        self._name = self._Thread__name = __threading__._newname("Dummy-%d")
        # All dummy threads in the same native thread share the same ident
        # (that of the native thread), unless we're monkey-patched.
        self._set_ident()

        g = getcurrent()
        gid = _get_ident(g)
        __threading__._active[gid] = self
        rawlink = getattr(g, 'rawlink', None)
        if rawlink is not None:
            # raw greenlet.greenlet greenlets don't
            # have rawlink...
            rawlink(_cleanup)
        else:
            # ... so for them we use weakrefs.
            # See https://github.com/gevent/gevent/issues/918
            ref = self.__weakref_ref
            ref = ref(g, _make_cleanup_id(gid)) # pylint:disable=too-many-function-args
            self.__raw_ref = ref
            assert self.__raw_ref is ref # prevent pylint thinking its unused
Example #5
    def add_worker(self, nb = 1, name = None):
        tname = name or self.worker_name or "Thread"

        for n in range(nb): # pylint: disable=unused-variable
            t = threading.Thread(target = self.process_request_thread,
                                 args   = (self,))
            t.setName(threading._newname("%s:%%d" % tname)) # pylint: disable=protected-access
            t.daemon = True
            t.start()
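The double percent sign in the template leaves a literal %d for _newname to fill, so each worker ends up with a name like Thread:1, Thread:2 (or tname:1 when a name was given). A quick illustration of just the template handling, with placeholder values:

import threading

tname = "Worker"                     # placeholder value for illustration
template = "%s:%%d" % tname          # -> "Worker:%d"
print(threading._newname(template))  # e.g. "Worker:1" (private API, may change)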
Example #6
    def __init__(self):
        #_DummyThread_.__init__(self) # pylint:disable=super-init-not-called

        # It'd be nice to use a pattern like "greenlet-%d", but maybe somebody out
        # there is checking thread names...
        self._name = self._Thread__name = __threading__._newname("DummyThread-%d")
        self._set_ident()

        __threading__._active[_get_ident()] = self
        g = getcurrent()
        rawlink = getattr(g, 'rawlink', None)
        if rawlink is not None:
            rawlink(_cleanup)
Example #7
    def __init__(self, group=None, target=None, name=None, args=(),
                 kwargs=None, *, daemon=None):
        assert group is None, "group argument must be None for now"
        if kwargs is None:
            kwargs = {}
        self._target = target
        self._name = str(name or _original_threading_._newname())
        self._args = args
        self._kwargs = kwargs
        self._daemonic = False
        self._ident = None
        self._started = Event()
        self._ended = Event()
        self._is_stopped = False
        self._initialized = True
        self._task = None
Example #8
    def __init__(self, threadPool, **kwds):
        """Set up thread in daemonic mode and start it immediatedly.

        requestsQueue and resultQueue are instances of Queue.Queue passed
        by the ThreadPool class when it creates a new worker thread.
        """

        if 'name' not in kwds:
            kwds['name'] = threading._newname('Wkr%d')

        BackgroundThread.__init__(self, **kwds)
        self.setDaemon(1)

        self.workRequestQueue = threadPool.requestsQueue

        self._dismissed = threading.Event()
        self.request_info = {}
        self.start()
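A generic sketch of the worker pattern the docstring describes, not the project's actual run() method: a daemon thread named with the 'Wkr%d' template pulls callables from a shared queue until it receives a sentinel.

import queue
import threading

requests = queue.Queue()

def worker():
    while True:
        item = requests.get()
        if item is None:        # sentinel stops the worker (an assumption, not the project's protocol)
            break
        item()                  # each request is assumed here to be a callable

t = threading.Thread(target=worker, name=threading._newname('Wkr%d'), daemon=True)
t.start()
requests.put(lambda: print('handled'))
requests.put(None)
t.join()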
Example #9
def run(commands, concurrency=None, sleep_seconds=0.5, start_command=None):
    '''
    Run subprocess commands in parallel and yield the results.

    Parameters
    ----------
    commands: iterable
        The commands to execute.
    concurrency: int or None
        Maximum number of commands to run at any given time. If None,
        all commands are started simultaneously.
    sleep_seconds: float
        Seconds to sleep in-between checking the status of commands.
    start_command: callable
        Function used to start commands. Must return a subprocess.Popen object.

    Yields
    ------
    (subprocess.Popen, cmd)
        Yields the completed subprocess.Popen object and the command element
        from which it was created.
    '''
    import queue
    import threading

    result_queue = queue.Queue()

    def command_finished(proc, cmd):
        result_queue.put((proc, cmd))

    def run_commands():
        run_with_callback(commands, concurrency, sleep_seconds,
                          command_finished, start_command)
        result_queue.put(None)

    t1 = threading.Thread(
        target=run_commands,
        name=threading._newname('ConcurrentCommandRunner-%d'))
    t1.start()

    for proc, cmd in iter(result_queue.get, None):
        yield proc, cmd
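A hedged usage sketch: it assumes each element of commands is an argument list and that the default start_command (provided by run_with_callback, which is not shown here) simply wraps subprocess.Popen.

# Hypothetical call; the exact command format depends on run_with_callback,
# which is not part of the snippet above.
commands = [['echo', 'one'], ['echo', 'two'], ['echo', 'three']]
for proc, cmd in run(commands, concurrency=2):
    print(cmd, '->', proc.returncode)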
Example #10
def base_execute(
        cmd, input=None, shell=False, env=None, cwd=None,
        stderr_to_stdout=False,
        do_on_read_stdout=do_write_stdout,
        do_on_read_stderr=do_write_stderr,
        do_on_read_pipeline_stderr=do_write_stderr,
        timeout=None,
        kill_timeout=None,
):
    if input and not isinstance(input, types.GeneratorType):
        raise TypeError('input argument must be a generator')

    index = _get_uid()

    start_time = time.time()
    log.info("Start time: %s uid=%s", start_time, index)

    opened_fds = []
    processes = []
    threads = []
    try:
        STDOUT_or_PIPE = subprocess.STDOUT if stderr_to_stdout else \
            subprocess.PIPE

        if shell:
            commands = [{'cmd': cmd, 'err': None, 'out': None}]
        else:
            commands = shlex_split_and_group_by_commands(cmd)

        commands[-1]['out'] = _process_out_and_err(
            commands[-1]['out'], opened_fds, subprocess.PIPE)
        commands[-1]['err'] = _process_out_and_err(
            commands[-1]['err'], opened_fds, STDOUT_or_PIPE)

        last_stdout = subprocess.PIPE if input else None

        read_end, default_out = os.pipe()
        read_end = os.fdopen(read_end, 'rb', 2048)
        default_out = os.fdopen(default_out, 'wb', 2048)
        opened_fds.append(default_out)
        opened_fds.append(read_end)

        default_out = None

        for i, command in enumerate(commands):
            log.info("Running p%d %r uid=%s", i, command, index)
            out = _process_out_and_err(command['out'], opened_fds, default_out)
            err = _process_out_and_err(command['err'], opened_fds, default_out)
            args = command['cmd']

            process = subprocess.Popen(args, stdout=out, stderr=err,
                                       stdin=last_stdout, shell=shell, env=env,
                                       cwd=cwd)
            process.args = args
            process.printable_args = _printable_args(args)
            processes.append(process)
            last_stdout = process.stdout

        last_process = processes[-1]
        first_process = processes[0]

        threads_info__fd__action__name__target = (
            (last_process.stderr, do_on_read_stderr, 'stderr-reader', _reader),
            (last_process.stdout, do_on_read_stdout, 'stdout-reader', _reader),
            (read_end, do_on_read_pipeline_stderr, 'pipe-err-reader', _reader),
            (first_process.stdin, input, 'stdin-writer', _writer),
        )

        for fd, action, name, target in threads_info__fd__action__name__target:
            if fd:
                thread_name = threading._newname(name + "-thread-%d")
                thread = threading.Thread(target=target,
                                          args=(fd, action, thread_name),
                                          name=thread_name)
                threads.append(thread)
                log.info('Starting %s uid=%s', thread_name, index)
                thread.start()
            else:
                log.info('Skip %s-thread uid=%s', name, index)

        log.info('Waiting command execution uid=%s', index)
        _wait_with_timeout(last_process, timeout, cmd, index)

    finally:
        log.info('Polling processes uid=%s', index)
        for i, process in enumerate(processes):
            process.poll()
            log.debug("Process p%d returncode %r uid=%s",
                      i, process.returncode, index)
            if process.returncode is None:
                log.info("Terminating process p%d uid=%s", i, index)
                process.terminate()
                # wait and kill !

        if kill_timeout is not None:
            log.warning('Killing processes! kill_timeout=%s uid=%s',
                        kill_timeout, index)
            time.sleep(kill_timeout)
            for i, process in enumerate(processes):
                process.poll()
                if process.returncode is None:
                    log.info("Kill process p%d uid=%s", i, index)
                    process.kill()

        log.info('Closing process in/out/err descriptors uid=%s', index)
        for i, process in enumerate(processes):
            for fd_name, fd in (('in', process.stdin), ('out', process.stdout),
                                ('err', process.stderr)):
                if fd and not fd.closed:
                    log.info('Close %s for p%d uid=%s', fd_name, i, index)
                    _safe_close(fd, index)

        log.info('Closing descriptors uid=%s', index)
        for fd in opened_fds:
            _safe_close(fd, index)

        for thread in threads:
            status = "alive" if thread.is_alive() else "stopped"
            log.debug("Waiting thread %s %s uid=%s", status,
                      thread.name, index)
            thread.join()

        finally_time = time.time()
        log.info("Finally time: %s (+%s) uid=%s",
                 finally_time, finally_time - start_time, index)

    return [process.returncode for process in processes]
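A hedged usage sketch, assuming shlex_split_and_group_by_commands (not shown above) splits a pipe-separated command string into pipeline stages and that the do_write_* defaults stream output to the caller's stdout/stderr:

# Hypothetical call; the argument format is inferred from the code above.
returncodes = base_execute('cat /etc/hostname | tr a-z A-Z', timeout=30)
print(returncodes)   # one exit status per pipeline stage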
Example #11
    def update_event(self, inp=-1):
        self.set_output_val(0, threading._newname(self.input(0)))
Example #12
def base_execute(
        cmd, input=None, shell=False, env=None, cwd=None,
        stderr_to_stdout=False,
        do_on_read_stdout=do_write_stdout,
        do_on_read_stderr=do_write_stderr,
        do_on_read_pipeline_stderr=do_write_stderr,
        timeout=None,
):
    if input and not isinstance(input, types.GeneratorType):
        raise TypeError('input argument must be a generator')

    start_time = time.time()
    log.info("Start time: {0}".format(start_time))

    opened_fds = []
    processes = []
    threads = []
    try:
        STDOUT_or_PIPE = subprocess.STDOUT if stderr_to_stdout else \
            subprocess.PIPE

        if shell:
            commands = [{'cmd': cmd, 'err': None, 'out': None}]
        else:
            commands = shlex_split_and_group_by_commands(cmd)

        commands[-1]['out'] = _process_out_and_err(
            commands[-1]['out'], opened_fds, subprocess.PIPE)
        commands[-1]['err'] = _process_out_and_err(
            commands[-1]['err'], opened_fds, STDOUT_or_PIPE)

        last_stdout = subprocess.PIPE if input else None

        read_end, default_out = os.pipe()
        read_end = os.fdopen(read_end, 'rb', 2048)
        default_out = os.fdopen(default_out, 'wb', 2048)
        opened_fds.append(default_out)
        opened_fds.append(read_end)

        default_out = None

        for command in commands:
            log.info("Running {0}".format(command))
            out = _process_out_and_err(command['out'], opened_fds, default_out)
            err = _process_out_and_err(command['err'], opened_fds, default_out)
            args = command['cmd']

            process = subprocess.Popen(args, stdout=out, stderr=err,
                                       stdin=last_stdout, shell=shell, env=env,
                                       cwd=cwd)
            process.args = args
            process.printable_args = _printable_args(args)
            processes.append(process)
            last_stdout = process.stdout

        last_process = processes[-1]
        first_process = processes[0]

        threads_info__fd__action__name__target = (
            (last_process.stderr, do_on_read_stderr, 'stderr-reader', _reader),
            (last_process.stdout, do_on_read_stdout, 'stdout-reader', _reader),
            (read_end, do_on_read_pipeline_stderr, 'pipe-err-reader', _reader),
            (first_process.stdin, input, 'stdin-writer', _writer),
        )

        for fd, action, name, target in threads_info__fd__action__name__target:
            if fd:
                thread_name = threading._newname(name + "-thread-%d")
                thread = threading.Thread(target=target,
                                          args=(fd, action, thread_name),
                                          name=thread_name)
                threads.append(thread)
                log.info('Starting {0}'.format(thread_name))
                thread.start()
            else:
                log.info('Skip {0}-thread'.format(name))

        log.info('Waiting command execution')
        _wait_with_timeout(last_process, timeout, cmd)

    finally:
        log.info('Polling processes')
        for process in processes:
            process.poll()
            log.debug("Process status: `{0}` -> {1}"
                      .format(process.printable_args, process.returncode))
            if process.returncode is None:
                log.info("Terminating process `{0}`"
                         .format(process.printable_args))
                process.terminate()
                # maybe wait and kill?

        log.info('Closing process in/out/err descriptors')
        for process in processes:
            for fd_name, fd in (('in', process.stdin), ('out', process.stdout),
                                ('err', process.stderr)):
                if fd and not fd.closed:
                    log.info("Close {0} for `{1}`"
                             .format(fd_name, process.printable_args))
                    _safe_close(fd)

        log.info('Closing descriptors')
        for fd in opened_fds:
            _safe_close(fd)

        for thread in threads:
            status = "alive" if thread.is_alive() else "stopped"
            log.debug("Waiting thread {0} - {1}".format(thread.name, status))
            thread.join()

        finally_time = time.time()
        log.info("Finally time: {0} (+{1})"
                 .format(finally_time, finally_time - start_time))

    return [process.returncode for process in processes]
Example #13
    def __init__(self, *args, **kwargs):
        name_prefix = kwargs.pop('name_prefix', None)
        if name_prefix is not None:
            kwargs['name'] = name_prefix + threading._newname('-%d')

        threading.Thread.__init__(self, *args, **kwargs)
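A minimal usage sketch, assuming the initializer above belongs to a threading.Thread subclass; PrefixedThread is a placeholder name, and only the __init__ body is taken from the example:

import threading

class PrefixedThread(threading.Thread):      # hypothetical class name
    def __init__(self, *args, **kwargs):
        name_prefix = kwargs.pop('name_prefix', None)
        if name_prefix is not None:
            kwargs['name'] = name_prefix + threading._newname('-%d')
        threading.Thread.__init__(self, *args, **kwargs)

t = PrefixedThread(target=print, args=('hi',), name_prefix='downloader')
print(t.name)   # e.g. "downloader-1"
t.start()
t.join()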
Example #14
    def __init__(self):
        Thread.__init__(self, name=_original_threading_._newname("Dummy-%d"),
                        daemon=True)
        self._set_ident()
        _active[self._ident] = self