Example #1
async def shell_out(*args, **kwargs):
    stdin = kwargs.get('stdin', None)
    if stdin is not None:
        p = process.Subprocess(args,
                               stdin=process.Subprocess.STREAM,
                               stdout=process.Subprocess.STREAM)
        await p.stdin.write(stdin.encode('utf-8'))
        p.stdin.close()
    else:
        p = process.Subprocess(args, stdout=process.Subprocess.STREAM)
    out = await p.stdout.read_until_close()
    exit_code = await p.wait_for_exit(raise_error=True)
    return out.decode('utf-8')
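A minimal usage sketch for the helper above; the commands and the stdin payload are illustrative assumptions, not part of the original project.

# Hypothetical caller of shell_out(); requires a running event loop.
async def demo_shell_out():
    # Pipe text through `sort` via the optional stdin keyword.
    sorted_text = await shell_out('sort', stdin='banana\napple\n')
    # Plain invocation without stdin.
    listing = await shell_out('ls', '-l')
    return sorted_text, listing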
Example #2
 def play_stream(self, info):
     self.kill_stream()
     download_cmd = ["rtmpdump",
                     "-r", "rtmpe://%s%s" % (info['streamHost'], info['streamApp']),
                     "-a", info['streamApp'][1:],
                     "-y", "mp3:" + info['surl'],
                     "-o", "-",
                    ]
     play_cmd = ["mplayer", "-cache", "2048", "-quiet", "-"]
     self.download_p = process.Subprocess(
             download_cmd, stdout=subprocess.PIPE, io_loop=ioloop.IOLoop.instance())
     self.play_p = process.Subprocess(
             play_cmd, stdin=self.download_p.stdout, io_loop=ioloop.IOLoop.instance())
     self.play_p.set_exit_callback(self.stream_ended_cb(self.play_p))
Example #3
 def grep(self, regex, fn, stdout, stderr, **kw):
     cmd = [self.toolpaths.cmd_grep, '--text', '--line-buffered', '--color=never', '-e', regex]
     if fn:
         cmd.append(fn)
     proc = process.Subprocess(cmd, stdout=stdout, stderr=stderr, **kw)
     log.debug('running grep %s, pid: %s', cmd, proc.proc.pid)
     return proc
Example #4
    def open(self, *args, **kwargs):
        log.info("Spawning ccls subprocess")

        # Create an instance of the language server
        proc = process.Subprocess([
            'ccls',
            '--init={"capabilities": {"foldingRangeProvider": false}, "index": {"onChange": true, "trackDependency":2}, "clang": {"resourceDir": "/home/CppLanguageServer/clang+llvm-10.0.0-x86_64-linux-gnu-ubuntu-18.04/lib/clang/10.0.0"}}'
        ],
                                  stdin=subprocess.PIPE,
                                  stdout=subprocess.PIPE)

        # Create a writer that formats json messages with the correct LSP headers
        self.writer = streams.JsonRpcStreamWriter(proc.stdin)

        # Create a reader for consuming stdout of the language server. We need to
        # consume this in another thread
        def consume():
            # Start a tornado IOLoop for reading/writing to the process in this thread
            ioloop.IOLoop()
            reader = streams.JsonRpcStreamReader(proc.stdout)
            reader.listen(lambda msg: self.write_message(json.dumps(msg)))

        thread = threading.Thread(target=consume)
        thread.daemon = True
        thread.start()
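For completeness, the write path of such a WebSocket-to-LSP bridge typically mirrors the reader: messages arriving over the WebSocket are parsed and handed to the JsonRpcStreamWriter. The handler below is a hedged sketch, not part of the original class.

    def on_message(self, message):
        # Forward a JSON-RPC message from the WebSocket client to ccls' stdin.
        self.writer.write(json.loads(message))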
Example #5
def _Run(callback):
    """Grab the lock and run all commands an subprocesess."""
    job_set = options.options.job_set
    assert job_set in kJobSets.keys(
    ), '--job_set must be one of %r' % kJobSets.keys()
    jobs = kJobSets[job_set]

    client = db_client.DBClient.Instance()
    job = Job(client, job_set)
    got_lock = yield gen.Task(job.AcquireLock)
    if not got_lock:
        logging.warning('Failed to acquire job lock: exiting.')
        callback()
        return

    # Wrap entire call inside a try to make sure we always release the lock.
    try:
        for title, args in jobs:
            logging.info('[%s] running %s' % (title, ' '.join(args)))

            # Run the task and wait for the termination callback.
            proc = process.Subprocess(args, io_loop=ioloop.IOLoop.instance())
            code = yield gen.Task(proc.set_exit_callback)

            logging.info('[%s] finished with code: %r' % (title, code))

    except:
        logging.error(traceback.format_exc())
    finally:
        yield gen.Task(job.ReleaseLock)

    callback()
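yield gen.Task(proc.set_exit_callback) is the old callback-style way of waiting for the child process; on Tornado 4.2 and later the same step could be written with wait_for_exit, roughly as sketched below (a hedged equivalent, not the project's code).

# Rough modern equivalent of `yield gen.Task(proc.set_exit_callback)` above:
proc = process.Subprocess(args)
code = yield proc.wait_for_exit(raise_error=False)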
Example #6
def handle_connection(connection, address):
    stream = iostream.IOStream(connection)
    callback = functools.partial(_handle_headers, stream)
    stream.read_until("\r\n\r\n", callback)
    stream.write("HTTP/1.0 200 OK\r\n")
    stream.write("Content-Type: image/gif\r\n")
    stream.write("\r\n")

    stream.write(HEADER_DATA)
    stream.write(INITIAL_FRAME)

    # run mtr (traceroute) against the remote host IP
    remote_ip = address[0]
    traceroute_proc = process.Subprocess(
        ['mtr', '-c', '3', '-r', '-o', 'LSD BAW', remote_ip],
        stdout=process.Subprocess.STREAM)
    traceroute_proc.initialize()

    output_buffer = []
    _cb = functools.partial(_handleResult, output_buffer)

    result = traceroute_proc.stdout.read_until_close(callback=_cb,
                                                     streaming_callback=_cb)

    if CLOSE_ON_TIMEOUT:
        callback = functools.partial(closestream, stream, traceroute_proc)
        ioloop.IOLoop.instance().add_timeout(
            datetime.timedelta(seconds=MAX_TIME), callback)

    streams.append((stream, traceroute_proc, output_buffer, 0))
Example #7
 def tail(self, ip, n, fn, stdout, stderr, **kw):
     flag_follow = '-F' if self.follow_names else '-f'
     if ip:
         user, password = self.get_user_pass(ip)
         if '\\' in fn:  #win system
             cmd = [
                 self.toolpaths.cmd_sshpass, '-p', password,
                 self.toolpaths.cmd_ssh, '{}@{}'.format(user,
                                                        ip), 'tail', '-n',
                 str(n), flag_follow, '\"{}\"'.format(fn[1:])
             ]
         else:
             cmd = [
                 self.toolpaths.cmd_sshpass, '-p', password,
                 self.toolpaths.cmd_ssh, '{}@{}'.format(user, ip),
                 self.toolpaths.cmd_tail, '-n',
                 str(n), flag_follow, fn
             ]
     else:
         cmd = [self.toolpaths.cmd_tail, '-n', str(n), flag_follow, fn]
     proc = process.Subprocess(cmd,
                               stdout=stdout,
                               stderr=stderr,
                               bufsize=1,
                               **kw)
     log.debug('running tail %s, pid: %s', cmd, proc.proc.pid)
     return proc
Example #8
 def awk(self, script, fn, stdout, stderr, **kw):
     cmd = [self.toolpaths.cmd_awk, '--sandbox', script]
     if fn:
         cmd.append(fn)
     proc = process.Subprocess(cmd, stdout=stdout, stderr=stderr, **kw)
     log.debug('running awk %s, pid: %s', cmd, proc.proc.pid)
     return proc
Example #9
 def sed(self, script, fn, stdout, stderr, **kw):
     cmd = [self.toolpaths.cmd_sed, '-u', '-e', script]
     if fn:
         cmd.append(fn)
     proc = process.Subprocess(cmd, stdout=stdout, stderr=stderr, **kw)
     log.debug('running sed %s, pid: %s', cmd, proc.proc.pid)
     return proc
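The grep/awk/sed/tail helpers above leave stdout/stderr to the caller, so their output can either be streamed into the IOLoop or chained through OS pipes as in example #2. A hedged sketch of both call styles; the runner object and file names are assumptions.

async def find_errors(runner):
    # Stream matches straight back into the IOLoop.
    grep_stream = runner.grep(r'ERROR', '/var/log/app.log',
                              stdout=process.Subprocess.STREAM,
                              stderr=process.Subprocess.STREAM)
    first_match = await grep_stream.stdout.read_until(b'\n')

    # Chain grep into sed through an OS pipe (fn=None makes sed read stdin).
    grep_p = runner.grep(r'ERROR', '/var/log/app.log',
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    sed_p = runner.sed('s/^.*ERROR: //', None,
                       stdout=process.Subprocess.STREAM,
                       stderr=subprocess.PIPE,
                       stdin=grep_p.stdout)
    summary = await sed_p.stdout.read_until_close()
    return first_match, summary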
Example #10
File: podtube.py  Project: kaesi0/PodTube
def convert_videos():
    global conversion_queue
    global converting_lock
    try:
        remaining = [
            key for key in conversion_queue.keys()
            if not conversion_queue[key]['status']
        ]
        video = sorted(remaining,
                       key=lambda v: conversion_queue[v]['added'])[0]
        conversion_queue[video]['status'] = True
    except Exception:
        return
    with (yield converting_lock.acquire()):
        logging.info('Converting: %s', video)
        audio_file = './audio/{}.mp3'.format(video)
        ffmpeg_process = process.Subprocess([
            'ffmpeg', '-loglevel', 'panic', '-y', '-i',
            get_youtube_url(video), '-f', 'mp3', audio_file + '.temp'
        ])
        try:
            yield ffmpeg_process.wait_for_exit()
            os.rename(audio_file + '.temp', audio_file)
        except Exception as ex:
            logging.error('Error converting file: %s', ex)
            os.remove(audio_file + '.temp')
        finally:
            del conversion_queue[video]
Example #11
    async def async_call(self, args=None, timeout=None):
        """
        Call the system command and return its parsed output or standard error output.

        """
        if args is None:
            args = []

        # Executing a command with Tornado's Subprocess is only possible in the main thread
        if threading.main_thread().ident != threading.get_ident():
            return self.call(args=args, timeout=timeout)

        all_args = [
            self.CMD if self.CMD is not None else cfg['tools.%s.cmd' %
                                                      self.NAME]
        ]
        all_args.extend(self.COMMON_ARGS)
        all_args.extend(args)
        cmd = ' '.join(all_args)
        log.debug('Executing: %s', cmd)

        if self._cancelled:
            raise Exception('Task was cancelled')
        task = process.Subprocess(all_args,
                                  stderr=process.Subprocess.STREAM,
                                  stdout=process.Subprocess.STREAM)
        self.proc = task.proc

        coroutine = gen.multi([
            task.wait_for_exit(raise_error=False),
            task.stdout.read_until_close(),
            task.stderr.read_until_close()
        ])

        if not timeout:
            return_code, stdout, stderr = await coroutine
        else:
            try:
                return_code, stdout, stderr = await gen.with_timeout(
                    timedelta(seconds=timeout), coroutine)
            except gen.TimeoutError as exception:
                log.exception(
                    "Command %s timed out after %s while executing %s",
                    self.NAME, timeout, cmd)
                task.proc.kill()
                raise exception

        self.proc = None

        if return_code != 0:
            log.warning("Command '%s' failed wit exit code: %s", cmd,
                        return_code)
            log.debug("Command '%s':\nSTDOUT:\n%s\nSTDERR:\n%s", cmd, stdout,
                      stderr)
            if self.RAISE_ERROR:
                raise subprocess.CalledProcessError(return_code, cmd)

        return self.parser.parse(stdout.decode('utf-8'),
                                 stderr.decode('utf-8'))
Example #12
    def process(self):
        self.setState(self.JOB_PROCESSING)

        Log("[JOB:%d] process", self.d['id'])
        cmd = './processor/' + self.d['processor']
        argv = [cmd, str(self.d['id']), self.d['args']]
        sp = process.Subprocess(argv)
        sp.set_exit_callback(self.process_callback)
Example #13
 def run(self):
     process.Subprocess.initialize()
     self.p = process.Subprocess(['marian-server', '-c',
                                  self.service['configuration'],
                                  '--quiet-translation',
                                  '-p', self.service['port']])
     self.p.set_exit_callback(self.on_exit)
     ret = yield self.p.wait_for_exit()
Example #14
 def zcat(self, fn, stdout, stderr, **kw):
     cmd = [self.toolpaths.cmd_zcat, '-f', '-r', fn]
     proc = process.Subprocess(cmd,
                               stdout=stdout,
                               stderr=stderr,
                               bufsize=1,
                               **kw)
     log.debug('running zcat %s, pid: %s', cmd, proc.proc.pid)
     return proc
Example #15
async def test_fork(server):
    """
    This test should not fail. Some Subprocesses can make the ioloop hang; this test fails when that happens.
    """
    i = 0
    while i < 5:
        i += 1
        sub_process = process.Subprocess(["true"])
        await sub_process.wait_for_exit(raise_error=False)
Example #16
    def start_process(self):
        """
        Start the rstudio process
        """

        self.state['starting'] = True
        try:
            cmd = self.cmd + [
                '--user-identity=' + getpass.getuser(),
                '--www-port=' + str(self.port)
            ]

            server_env = os.environ.copy()

            # Seed RStudio's R and RSTUDIO env variables
            server_env.update(self.env)

            try:
                r_vars = detectR()
                server_env.update(r_vars)
            except:
                raise web.HTTPError(reason='could not detect R',
                                    status_code=500)

            @gen.coroutine
            def exit_callback(code):
                """
                Callback when the rsessionproxy dies
                """
                self.log.info(
                    'rsession process died with code {}'.format(code))
                del self.state['proc']
                if code != 0:
                    yield self.start_process()

            # Runs rsession in background
            proc = process.Subprocess(cmd, env=server_env)
            self.log.info('Starting rsession process...')
            self.state['proc'] = proc
            proc.set_exit_callback(exit_callback)

            for i in range(5):
                if (yield self.is_running()):
                    self.log.info('rsession startup complete')
                    break
                # Simple exponential backoff
                wait_time = max(1.4**i, 5)
                self.log.debug(
                    'Waiting {} before checking if rstudio is up'.format(
                        wait_time))
                yield gen.sleep(wait_time)
            else:
                raise web.HTTPError(500, 'could not start rsession in time')
        finally:
            self.state['starting'] = False
Example #17
File: log.py  Project: arwie/controlOS
    async def get(self, field):
        args = journalArgs.copy()
        args.append('--field={}'.format(field))
        journalProc = process.Subprocess(args,
                                         stdout=process.Subprocess.STREAM)

        values = await journalProc.stdout.read_until_close()
        self.writeJson(values.decode().splitlines())

        await journalProc.wait_for_exit(raise_error=False)
Example #18
def test_fork(server, io_loop):
    """
        This test should not fail. Some Subprocesses can make the ioloop hang; this test fails when that happens.
    """
    i = 0
    while i < 5:
        i += 1
        sub_process = process.Subprocess(["true"])
        yield sub_process.wait_for_exit(raise_error=False)
        sub_process.uninitialize()
Example #19
 def tail(self, n, fn, stdout, stderr, **kw):
     flag_follow = '-F' if self.follow_names else '-f'
     cmd = [self.toolpaths.cmd_tail, '-n', str(n), flag_follow, fn]
     proc = process.Subprocess(cmd,
                               stdout=stdout,
                               stderr=stderr,
                               bufsize=1,
                               **kw)
     log.debug('running tail %s, pid: %s', cmd, proc.proc.pid)
     return proc
Example #20
    async def start_process(self):
        """
        Start the process
        """
        if 'starting' in self.state:
            raise Exception(
                "Process {} start already pending, can not start again".format(
                    self.name))
        if 'proc' in self.state:
            raise Exception("Process {} already running, can not start".format(
                self.name))
        self.state['starting'] = True
        cmd = self.get_cmd()

        server_env = os.environ.copy()

        # Set up extra environment variables for process
        server_env.update(self.get_env())

        def exit_callback(code):
            """
            Callback when the process dies
            """
            self.log.info('{} died with code {}'.format(self.name, code))
            self.state.pop('proc', None)
            if code != 0 and not 'starting' in self.state:
                ioloop.IOLoop.current().add_callback(self.start_process)

        # Runs process in background
        self.log.info('Starting process...')
        proc = process.Subprocess(cmd, env=server_env, cwd=self.get_cwd())
        proc.set_exit_callback(exit_callback)

        for i in range(8):
            if (await self.is_running(proc)):
                self.log.info('{} startup complete'.format(self.name))
                break
            # Simple exponential backoff
            wait_time = 1.4**i
            self.log.debug(
                'Waiting {} seconds before checking if {} is up'.format(
                    wait_time, self.name))
            await gen.sleep(wait_time)
        else:
            # clear starting state for failed start
            self.state.pop('starting', None)
            # terminate process
            proc.terminate()
            raise web.HTTPError(500,
                                'could not start {} in time'.format(self.name))

        # add proc to state only after we are sure it has started
        self.state['proc'] = proc

        del self.state['starting']
Example #21
async def get_network_info():
    fd_network = process.Subprocess("netstat -i | tail -n +2",
                                    **SUBPROCESS_OPTS).stdout
    network_result = await fd_network.read_until_close()
    network_result = network_result.decode('utf-8').strip()
    network_result = network_result.split("\n")
    header = network_result[0].split()
    interfaces = network_result[1:]
    result = []
    for iface in interfaces:
        result.append(
            {key: value
             for key, value in zip(header, iface.split())})
    return result
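SUBPROCESS_OPTS is not shown above; since the command is a single shell string containing a pipe, it presumably enables shell execution and a streamed stdout. A plausible definition (an assumption, not taken from the source):

# Assumed options: run the command line through a shell and expose stdout as a
# Tornado stream so read_until_close() can be awaited.
SUBPROCESS_OPTS = dict(
    shell=True,
    stdout=process.Subprocess.STREAM,
)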
Example #22
    def open(self, *args, **kwargs):
        proc = process.Subprocess(
            [
                'pyls', '-v'
            ],  # the concrete LSP server command, e.g. 'pyls -v' or 'ccls --init={"index": {"onChange": true}}'
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE)
        self.writer = streams.JsonRpcStreamWriter(proc.stdin)

        def consume():
            ioloop.IOLoop()
            reader = streams.JsonRpcStreamReader(proc.stdout)
            reader.listen(lambda msg: self.write_message(json.dumps(msg)))

        thread = threading.Thread(target=consume)
        thread.daemon = True
        thread.start()
Example #23
    def _exec(self):
        self.clean() # Clean up old plans

        with tempfile.TemporaryDirectory() as tmpdir:
            cmd = (
                '/usr/bin/env', 'python3',
                bbctrl.get_resource('plan.py'),
                os.path.abspath(self.gcode), json.dumps(self.state),
                json.dumps(self.config),
                '--max-time=%s' % self.preplanner.max_plan_time,
                '--max-loop=%s' % self.preplanner.max_loop_time
            )

            self.preplanner.log.info('Running: %s', cmd)

            proc = process.Subprocess(cmd, stdout = process.Subprocess.STREAM,
                                      stderr = process.Subprocess.STREAM,
                                      cwd = tmpdir)
            errs = ''
            self.pid = proc.proc.pid

            try:
                try:
                    while True:
                        line = yield proc.stdout.read_until(b'\n')
                        self.progress = float(line.strip())
                        if self.cancel: return
                except iostream.StreamClosedError: pass

                self.progress = 1

                ret = yield proc.wait_for_exit(False)
                if ret:
                    errs = yield proc.stderr.read_until_close()
                    raise Exception('Plan failed: ' + errs.decode('utf8'))

            finally:
                proc.stderr.close()
                proc.stdout.close()

            if not self.cancel:
                os.rename(tmpdir + '/meta.json',    self.files[0])
                os.rename(tmpdir + '/positions.gz', self.files[1])
                os.rename(tmpdir + '/speeds.gz',    self.files[2])
                os.sync()
Example #24
File: StdChal.py  Project: LFsWang/judge
    def prefetch(self):
        '''Prefetch files.'''

        path_set = set([self.code_path])
        for root, _, files in os.walk(self.res_path):
            for filename in files:
                path_set.add(os.path.abspath(os.path.join(root, filename)))

        path_list = list(path_set)
        proc_list = []

        with StackContext(Privilege.fileaccess):
            for idx in range(0, len(path_list), 16):
                proc_list.append(process.Subprocess(
                    ['./Prefetch.py'] + path_list[idx:idx + 16],
                    stdout=process.Subprocess.STREAM))

        for proc in proc_list:
            yield proc.stdout.read_bytes(2)
Example #25
async def call_subprocess(args: List[str], stdin_data: Optional[bytes] = None) -> Tuple[int, bytes, bytes]:
    p = tornado_process.Subprocess(
        args,
        stdin=tornado_process.Subprocess.STREAM,
        stdout=tornado_process.Subprocess.STREAM,
        stderr=tornado_process.Subprocess.STREAM
    )

    if stdin_data:
        await p.stdin.write(stdin_data)
        p.stdin.close()

    exit_future = p.wait_for_exit(raise_error=False)
    stdout_future = p.stdout.read_until_close()
    stderr_future = p.stderr.read_until_close()

    await asyncio.wait({exit_future, stdout_future, stderr_future})

    return exit_future.result(), stdout_future.result(), stderr_future.result()
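A short usage sketch for call_subprocess; the command and stdin payload are illustrative only.

async def demo_call_subprocess():
    # Feed data on stdin and collect exit code, stdout and stderr.
    code, out, err = await call_subprocess(['grep', 'foo'],
                                           stdin_data=b'foo\nbar\n')
    return code, out.decode(), err.decode()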
Example #26
File: log.py  Project: arwie/controlOS
    async def readJournal(self, args):
        journalProc = process.Subprocess(args,
                                         stdout=process.Subprocess.STREAM)
        try:
            while not self.task.cancelled():
                msg = await journalProc.stdout.read_until(b'\n')
                msg = json.loads(msg.decode())

                if not '_SOURCE_REALTIME_TIMESTAMP' in msg:
                    msg['_SOURCE_REALTIME_TIMESTAMP'] = msg[
                        '__REALTIME_TIMESTAMP']
                msg = {k: v for k, v in msg.items() if not k.startswith('__')}

                self.write_messageJson(msg)

        except (iostream.StreamClosedError, asyncio.CancelledError):
            pass
        finally:
            self.close()
            journalProc.proc.terminate()
            await journalProc.wait_for_exit(raise_error=False)
Example #27
    def open(self, *args, **kwargs):
        log.info("Spawning pyls subprocess")

        # Create an instance of the language server
        proc = process.Subprocess(['pyls', '-v'],
                                  stdin=subprocess.PIPE,
                                  stdout=subprocess.PIPE)

        # Create a writer that formats json messages with the correct LSP headers
        self.writer = streams.JsonRpcStreamWriter(proc.stdin)

        # Create a reader for consuming stdout of the language server. We need to
        # consume this in another thread
        def consume():
            # Start a tornado IOLoop for reading/writing to the process in this thread
            ioloop.IOLoop()
            reader = streams.JsonRpcStreamReader(proc.stdout)
            reader.listen(lambda msg: self.write_message(json.dumps(msg)))

        thread = threading.Thread(target=consume)
        thread.daemon = True
        thread.start()
Example #28
File: server.py  Project: sskorol/Opus-MT
 def run(self):
     process.Subprocess.initialize()
     self.p = process.Subprocess([
         'marian-server',
         '-c',
         self.service['configuration'],
         '-p',
         self.service['port'],
         '--allow-unk',
         # enables translation with a mini-batch size of 64, i.e. translating 64 sentences at once, with a beam-size of 6.
         '-b',
         '6',
         '--mini-batch',
         '64',
         # use a length-normalization weight of 0.6 (this usually increases BLEU a bit).
         '--normalize',
         '0.6',
         '--maxi-batch-sort',
         'src',
         '--maxi-batch',
         '100',
     ])
     self.p.set_exit_callback(self.on_exit)
     ret = yield self.p.wait_for_exit()
Example #29
    def export(self, dry_run, agent_map):
        """
            Export a version to the embedded server
        """
        inmanta_path = [sys.executable, "-m", "inmanta.app"]

        cmd = inmanta_path + [
            "-vvv", "export", "-e",
            str(self._environment_id), "--server_address", "localhost",
            "--server_port",
            str(self._server_port)
        ]

        sub_process = process.Subprocess(cmd,
                                         stdout=process.Subprocess.STREAM,
                                         stderr=process.Subprocess.STREAM)

        log_out, log_err, returncode = yield [
            sub_process.stdout.read_until_close(),
            sub_process.stderr.read_until_close(),
            sub_process.wait_for_exit(raise_error=False)
        ]

        sub_process.uninitialize()
        if returncode > 0:
            print("An error occurred while compiling the model:")
            if len(log_out) > 0:
                print(log_out.decode())
            if len(log_err) > 0:
                print(log_err.decode())

            return False

        LOGGER.info("Export of model complete")
        yield self.deploy(dry_run, agent_map)
        return True
Example #30
def mv(from_path, to_path):
    """移动文件"""
    cmd = ['mv', from_path, to_path]
    ps = process.Subprocess(cmd)
    result = yield ps.wait_for_exit(raise_error=False)
    raise gen.Return(result == 0)
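The function yields and raises gen.Return, so it is presumably decorated with @gen.coroutine in the original module; a hedged sketch of how it would then be called from an async caller (the archive path is made up):

# Assumes mv() above carries a @gen.coroutine decorator in its module.
async def archive(path):
    ok = await mv(path, '/tmp/archive/')
    if not ok:
        raise RuntimeError('mv failed for %s' % path)
    return ok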