Beispiel #1
0
 def exposed_handle_cli(self, args):
     """Parse CLI arguments received from an IPC client and run the command.

     `args` arrives as a remote rpyc object and is copied locally first.
     Output (and optionally logs) are redirected back to the client unless
     the command was invoked with --cron.
     """
     args = rpyc.utils.classic.obtain(args)
     logger.verbose('Running command `{}` for client.', ' '.join(args))
     parser = get_parser()
     try:
         options = parser.parse_args(args, file=self.client_out_stream)
     except SystemExit as e:
         if e.code:
             # TODO: Not sure how to properly propagate the exit code back to client
             logger.debug(
                 'Parsing cli args caused system exit with status {}.',
                 e.code)
         return
     context_managers = []
     # Don't capture any output when used with --cron
     if not options.cron:
         context_managers.append(capture_console(self.client_out_stream))
         if options.loglevel != 'NONE':
             context_managers.append(
                 capture_logs(self.client_log_sink, level=options.loglevel))
     # Saving original terminal size to restore after monkeypatch.
     # NOTE: the context managers are built *before* patching so that any
     # failure above cannot leave terminal_info patched without the
     # try/finally below being in effect.
     original_terminal_info = terminal.terminal_info
     # Monkeypatching terminal_size so it'll work using IPC
     terminal.terminal_info = self._conn.root.terminal_info
     try:
         with contextlib.ExitStack() as stack:
             for cm in context_managers:
                 stack.enter_context(cm)
             self.manager.handle_cli(options)
     finally:
         # Restoring original terminal_size value
         terminal.terminal_info = original_terminal_info
Beispiel #2
0
 def wrapper(self, *args, **kw):
     """Invoke the wrapped task function with logging context applied.

     All log records emitted during the run are tagged with the task's
     name, id and session id; console output is additionally captured
     when the task has an output target configured.
     """
     with contextlib.ExitStack() as stack:
         # Set the appropriate logger context while running task
         stack.enter_context(
             logger.contextualize(task=self.name,
                                  task_id=self.id,
                                  session_id=self.session_id))
         # Capture console output if configured to do so
         if self.output:
             stack.enter_context(capture_console(self.output))
         return func(self, *args, **kw)
Beispiel #3
0
    def post(self, session: Session = None) -> Response:
        """Execute the requested tasks and optionally stream their results.

        Expects a JSON body with a `tasks` list (validated case-insensitively
        against the configured tasks) plus optional execution options.  When
        any of progress/summary/loglevel/entry_dump is requested, the
        response is a `text/event-stream` that relays queued log lines until
        every task's completion event is set; otherwise a plain JSON list of
        the queued tasks is returned.

        Raises NotFoundError for an unknown task name and BadRequest when an
        injected entry has no title and none can be derived from its URL.
        """
        data = request.json
        # Validate requested task names (case-insensitively) against config
        for task in data.get('tasks'):
            if task.lower() not in [
                    t.lower()
                    for t in self.manager.user_config.get('tasks', {}).keys()
            ]:
                raise NotFoundError(f'task {task} does not exist')

        queue = ExecuteLog()
        output = queue if data.get('loglevel') else None
        # Stream only when the client asked for any live data
        # (any() already returns a bool; no ternary needed)
        stream = any(
            arg[0] in ['progress', 'summary', 'loglevel', 'entry_dump']
            for arg in data.items() if arg[1])
        loglevel = data.pop('loglevel', None)

        if loglevel:
            loglevel = loglevel.upper()

        # This emulates the CLI command of using `--now` and `no-cache`
        options = {
            'interval_ignore': data.pop('now', None),
            'nocache': data.pop('no_cache', None),
            'allow_manual': True,
        }

        # Pass all remaining request fields straight through as options
        options.update(data)

        if data.get('inject'):
            entries = []
            for item in data.get('inject'):
                entry = Entry()
                entry['url'] = item['url']
                if not item.get('title'):
                    # No title supplied: derive one from the URL's
                    # Content-Disposition filename.
                    # NOTE(review): cgi is deprecated since Python 3.11 and
                    # removed in 3.13; email.message is the suggested
                    # replacement for header parsing.
                    try:
                        _, params = cgi.parse_header(
                            requests.head(
                                item['url']).headers['Content-Disposition'])
                        entry['title'] = params['filename']
                    except KeyError:
                        raise BadRequest(
                            'No title given, and couldn\'t get one from the URL\'s HTTP response'
                        )

                else:
                    entry['title'] = item.get('title')
                if item.get('force'):
                    entry['immortal'] = True
                if item.get('accept'):
                    entry.accept(reason='accepted by API inject')
                if item.get('fields'):
                    for key, value in item.get('fields').items():
                        entry[key] = value
                entries.append(entry)
            options['inject'] = entries

        # Capture console/log output into the queue when streaming logs
        if output:
            with capture_console(output), capture_logs(output, level=loglevel):
                executed_tasks = self.manager.execute(options=options)
        else:
            executed_tasks = self.manager.execute(options=options)

        tasks_queued = []

        # Record each queued task and register its stream state
        for task_id, task_name, task_event in executed_tasks:
            tasks_queued.append({
                'id': task_id,
                'name': task_name,
                'event': task_event
            })
            _streams[task_id] = {
                'queue': queue,
                'last_update': datetime.now(),
                'args': data
            }

        if not stream:
            return jsonify({
                'tasks': [{
                    'id': task['id'],
                    'name': task['name']
                } for task in tasks_queued]
            })

        def stream_response():
            # First return the tasks to execute
            yield '{"stream": ['
            yield json.dumps({
                'tasks': [{
                    'id': task['id'],
                    'name': task['name']
                } for task in tasks_queued]
            }) + ',\n'

            # Relay queued lines until the queue drains and every task's
            # completion event is set, then clean up the stream registry.
            while True:
                try:
                    yield queue.get(timeout=1) + ',\n'
                    continue
                except Empty:
                    pass

                if queue.empty() and all(
                    [task['event'].is_set() for task in tasks_queued]):
                    for task in tasks_queued:
                        del _streams[task['id']]
                    break
            yield '{}]}'

        return Response(stream_response(), mimetype='text/event-stream')