Example #1

import abc
import logging
from concurrent.futures import ThreadPoolExecutor
from time import sleep

# JobsDB and format_time are project-local helpers; import paths assumed
from multivac.db import JobsDB
from multivac.util import format_time

log = logging.getLogger(__name__)

class ChatBot(object):
    """
    Generic base class for chatbots. Subclasses must provide
    a reply method and a messages property generator
    """

    def __init__(self, redis_host, redis_port):
        self.db = JobsDB(redis_host, redis_port)
        self.builtins = { 'help': self._help,
                          'jobs': self._jobs,
                          'logs': self._logs,
                          'cancel': self._cancel,
                          'confirm': self._confirm,
                          'workers': self._workers }
        self.message_queue = []

        self.executor = ThreadPoolExecutor(max_workers=10)
        self.executor.submit(self._message_worker)

    @abc.abstractmethod
    def reply(self, text, channel):
        raise NotImplementedError

    @abc.abstractproperty
    def messages(self):
        """
        Generator yielding message tuples
        in the form (text,user,channel)
        """
        raise NotImplementedError

    def _message_worker(self):
        for msg in self.messages:
            try:
                self._process_msg(*msg)
            except Exception as e:
                log.error(e)

    def _process_msg(self, text, user, channel):
        """
        """
        command, args = self._parse_command(text)

        if command in self.builtins:
            self.reply(self.builtins[command](args), channel)
            self.reply('EOF', channel)
        else:
            ok, result = self.db.create_job(command, args=args, initiator=user)
            if not ok:
                self.reply(result, channel)
                self.reply('EOF', channel)
                return

            job_id = result
            log.info('Created job %s' % job_id)

            job = self.db.get_job(job_id)
            if job['status'] == 'pending':
                self.reply('%s needs confirmation' % str(job_id), channel)
                self.reply('EOF', channel)

            if job['chatbot_stream'] != 'False':
                self.executor.submit(self._output_handler, job_id, channel)

    @staticmethod
    def _parse_command(text):
        """
        Parse message text; return command and arguments
        """
        words = text.split(' ')
        cmd = words.pop(0)
        args = ' '.join(words)

        return cmd, args

    def _output_handler(self, job_id, channel, stream=True):
        """
        Worker to send the output of a given job_id to a given channel
        params:
         - stream(bool): Toggle streaming output as it comes in
           vs posting when a job finishes. Default True.
        """
        active = False
        prefix = '[%s]' % job_id[-8:]
        log.debug('output handler spawned for job %s' % job_id)

        # sleep on jobs awaiting confirmation
        while not active:
            job = self.db.get_job(job_id)
            if job['status'] == 'canceled':
                return
            if job['status'] != 'pending':
                active = True
            else:
                sleep(1)

        if stream:
            for line in self.db.get_log(job_id, timestamp=False):
                self.reply(prefix + line, channel)
        else:
            msg = ''
            for line in self.db.get_log(job_id, timestamp=False):
                msg += prefix + line + '\n'
            self.reply(msg, channel)

        self.reply('EOF', channel)

    ######
    # Builtin command methods
    ######

    def _confirm(self, arg):
        """ Confirm a pending job """
        job = self.db.get_job(arg)
        if not job:
            return 'no such job id'
        if job['status'] != 'pending':
            return 'job not awaiting confirm'

        self.db.update_job(arg, 'status', 'ready')

    def _cancel(self, arg):
        """ Cancel a pending job """
        job = self.db.get_job(arg)
        if not job:
            return 'no such job id'

        ok, result = self.db.cancel_job(arg)
        if not ok:
            return result

        return 'job %s canceled' % job['id']

    def _workers(self, arg):
        """ Show registered job workers """
        workers = self.db.get_workers()
        if not workers:
            return 'no registered workers'
        else:
            return [('%s(%s)' % (w['name'], w['host'])) for w in workers]

    def _jobs(self, arg):
        """ Show jobs by currrent state """
        subcommands = ['pending', 'running', 'completed', 'all']
        if arg not in subcommands:
            return 'argument must be one of %s' % ','.join(subcommands)

        jobs = self.db.get_jobs(status=arg)

        if not jobs:
            return 'no matching jobs found'

        formatted = []
        for j in jobs:
            created = format_time(j['created'])
            formatted.append('%s %s(%s) %s' %
                             (created, j['id'], j['name'], j['status']))

        return formatted

    def _logs(self, args):
        """ Show logs for a given job id """
        if not self.db.get_job(args):
            return 'no matching jobs found'

        return self.db.get_stored_log(args)

    def _help(self, args):
        """ Show this help dialog """
        builtin_cmds = ['Builtin commands:']
        for cmd, func in sorted(self.builtins.items()):
            builtin_cmds.append('  [ %s ]%s' % (cmd, func.__doc__))

        action_cmds = ['Action commands:']
        for cmd in sorted([ a['name'] for a in self.db.get_actions() ]):
            action_cmds.append('  [ %s ]' % cmd)

        return builtin_cmds + action_cmds
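
The base class leaves reply() and messages abstract, so a concrete bot only has to supply a transport. Below is a minimal sketch of a console-backed subclass for local testing, assuming the ChatBot class above is importable; the ConsoleBot name and the multivac.chat import path are assumptions, and a real integration (Slack, IRC, etc.) would replace the stdin/stdout plumbing.

# Hypothetical subclass for local testing; ChatBot import path assumed
from multivac.chat import ChatBot


class ConsoleBot(ChatBot):
    """ Read commands from stdin and print replies """

    def reply(self, text, channel):
        # builtins may return a list of lines; the channel is ignored here
        if isinstance(text, list):
            text = '\n'.join(text)
        print(text)

    @property
    def messages(self):
        # yield (text, user, channel) tuples, as _message_worker expects
        while True:
            try:
                line = input('> ')
            except EOFError:
                return
            yield (line, 'console-user', 'console')


if __name__ == '__main__':
    # __init__ starts _message_worker on the executor, so constructing
    # the bot is enough to begin consuming messages
    ConsoleBot('localhost', 6379)
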
Example #2

import fcntl
import logging
import os
import shlex
import socket
import subprocess
from copy import deepcopy
from concurrent.futures import ThreadPoolExecutor
from time import sleep, time

import names  # third-party 'names' package (random first names)
import yaml

# JobsDB is a project-local helper; import path assumed
from multivac.db import JobsDB

log = logging.getLogger(__name__)

# Seconds before unconfirmed pending jobs are canceled; value assumed
pending_job_timeout = 300

class JobWorker(object):
    """
    Multivac worker process. Spawns jobs, streams job stdout/stderr,
    and creates actions and groups in redis from config file.
    """
    def __init__(self, redis_host, redis_port, config_path):
        self.pids = {}  # dict of job_id:subprocess object
        self.db = JobsDB(redis_host, redis_port)

        self.config_path = config_path
        self.read_config(self.config_path)
        self.name = self._get_name()

        self.executor = ThreadPoolExecutor(max_workers=10)

        self.run()

    def run(self):
        print('Starting Multivac Job Worker %s' % self.name)
        while True:
            self.db.register_worker(self.name, socket.getfqdn())

            # spawn ready jobs
            for job in self.db.get_jobs(status='ready'):
                self.executor.submit(self._job_worker, job)

            # collect ended processes
            pids = deepcopy(self.pids)
            for job_id, pid in pids.items():
                if not self._is_running(pid):
                    self.db.cleanup_job(job_id)
                    del self.pids[job_id]
                    print('completed job %s' % job_id)

            # re-read config if modified
            if os.stat(self.config_path).st_mtime != self.config_mtime:
                log.warning('re-reading modified config %s' % self.config_path)
                self.read_config(self.config_path)

            # cancel pending jobs exceeding timeout
            now = time()
            for job in self.db.get_jobs(status='pending'):
                if (now - int(job['created'])) > pending_job_timeout:
                    print('canceling unconfirmed job %s' % job['id'])
                    self.db.cancel_job(job['id'])

            sleep(1)

    def read_config(self, path):
        with open(path, 'r') as of:
            config = yaml.safe_load(of)

        self.config_mtime = os.stat(path).st_mtime

        if 'groups' in config:
            self._read_groups(config['groups'])
        self._read_actions(config['actions'])

    def _read_groups(self, groups):
        self.db.purge_groups()
        for group,members in groups.items():
            self.db.add_group(group, members)
            log.info('loaded group %s' % (group))

    def _read_actions(self, actions):
        self.db.purge_actions()
        for a in actions:
            action = { 'confirm_required': False,
                       'chatbot_stream': True,
                       'allow_groups': 'all' }
            action.update(a)

            if isinstance(action['allow_groups'], list): 
                action['allow_groups'] = ','.join(action['allow_groups'])

            self.db.add_action(action)
            log.info('loaded action %s' % (action['name']))

    def _get_name(self):
        """
        Randomly generate a unique name for this worker
        """
        name = names.get_first_name()
        if name in [w['name'] for w in self.db.get_workers()]:
            return self._get_name()
        else:
            return name

    def _is_running(self, pid):
        try:
            os.kill(pid, 0)
        except OSError:
            return False
        return True

    def _job_worker(self, job):
        print('running job %s' % job['id'])
        self.db.update_job(job['id'], 'status', 'running')

        if job['args']:
            cmdline = shlex.split(job['cmd'] + ' ' + job['args'])
        else:
            cmdline = shlex.split(job['cmd'])

        proc = subprocess.Popen(
            cmdline,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)

        self.pids[job['id']] = proc.pid

        self.executor.submit(self._log_worker,
                             job['id'],
                             proc.stdout,
                             proc.stderr)

        proc.wait()

    def _log_worker(self, job_id, stdout, stderr):
        log.debug('Log handler started for job %s' % job_id)
        while True:
            output = self._read(stdout)
            error = self._read(stderr)
            if output:
                output = self._sanitize(output)
                self.db.append_job_log(job_id, output)
                log.debug('%s-STDOUT: %s' % (job_id, output))
            if error:
                error = self._sanitize(error)
                self.db.append_job_log(job_id, error)
                log.debug('%s-STDERR: %s' % (job_id, error))

            # exit when job has been collected
            if job_id not in self.pids:
                log.debug('Log handler stopped for job %s' % job_id)
                return

    def _sanitize(self, line):
        line = line.decode('utf-8')
        line = line.replace('\n', '')

        return line

    def _read(self, pipe):
        """
        Non-blocking method for reading fd
        """
        fd = pipe.fileno()
        fl = fcntl.fcntl(fd, fcntl.F_GETFL)
        fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
        try:
            return pipe.read()
        except (IOError, OSError):
            return ''