Exemplo n.º 1
0
class DescribeUserHandler(handler.TriggeredHandler):
    """Lists the details of some ldap user."""

    required_clients = ("ldap", )
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('ldap describe user', takes_args=True),
        ],
        'args': {
            'order': [
                'user',
            ],
            'help': {
                'user': '******',
            },
            'schema': Schema({
                Required("user"): All(scu.string_types(), Length(min=1)),
            }),
        },
    }

    def _run(self, user):
        """Look up the user in ldap and reply with a table of attributes."""
        details = self.bot.clients.ldap_client.describe_user(user)
        replier = self.message.reply_text
        if not details:
            replier("No user with name `%s` found." % (user),
                    threaded=True,
                    prefixed=False)
            return
        # Turn every non-null attribute into a (header, value) column,
        # prettifying attribute names into title-like headers.
        headers = []
        values = []
        for attr in sorted(details.keys()):
            attr_val = details.get(attr)
            if attr_val is None:
                continue
            header = attr.replace("_", ' ')
            headers.append(header[0].upper() + header[1:])
            values.append(attr_val)
        text = "\n".join([
            "```",
            tabulate.tabulate([values], headers=headers),
            "```",
        ])
        replier(text, threaded=True, prefixed=False)
Exemplo n.º 2
0
class RunAllHandler(handler.TriggeredHandler):
    """Explicitly runs all periodic jobs."""

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('periodics run all', takes_args=True),
        ],
        'args': {
            'order': [
                'skip_paused',
            ],
            'help': {
                'skip_paused': ('skip over paused jobs (ie do not'
                                ' unpause them)'),
            },
            'defaults': {
                'skip_paused': True,
            },
            'converters': {
                'skip_paused': hu.strict_bool_from_string,
            },
            'schema': Schema({
                Required("skip_paused"): bool,
            }),
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
    }

    def _run(self, skip_paused):
        """Force each unique scheduled job to run now, then report counts."""
        kicked_count = 0
        skipped_count = 0
        visited = set()
        for a_job in self.bot.scheduler.get_jobs():
            # Guard against the scheduler yielding the same job twice.
            if a_job.id in visited:
                continue
            visited.add(a_job.id)
            # A job without a next run time is paused.
            if skip_paused and a_job.next_run_time is None:
                skipped_count += 1
            else:
                a_job.modify(next_run_time=self.date_wrangler.get_now())
                kicked_count += 1
        if kicked_count:
            # Only poke the scheduler when something actually changed.
            self.bot.scheduler.wakeup()
        text = ("Kicked %s jobs"
                " and skipped %s jobs.") % (kicked_count, skipped_count)
        self.message.reply_text(text, threaded=True, prefixed=False)
Exemplo n.º 3
0
class AddHandler(handler.TriggeredHandler):
    """Alias a long command to a short(er) one (for the calling user)."""

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            # NOTE: pass takes_args by keyword for consistency with the
            # other handlers in this file (it was previously positional).
            trigger.Trigger('alias add', takes_args=True),
        ],
        'schema':
        Schema({
            Required("long"): All(su.string_types(), Length(min=1)),
            Required("short"): All(su.string_types(), Length(min=1)),
        }),
        'args': {
            'order': [
                'long',
                'short',
            ],
            'help': {
                'long': "full command",
                'short': 'shorter alias of full command',
            },
        },
    }

    def _run(self, long, short):
        """Record (or overwrite) a per-user alias in the bot's brain.

        The alias is stored under a ``user:<user_id>`` key so each user
        gets their own alias namespace.
        """
        from_who = self.message.body.user_id
        if not from_who:
            # Without a user id there is nowhere to attach the alias.
            return
        from_who = "user:%s" % from_who
        # The brain is shared mutable state; serialize access to it.
        with self.bot.locks.brain:
            try:
                user_info = self.bot.brain[from_who]
            except KeyError:
                user_info = {}
            user_aliases = user_info.setdefault('aliases', {})
            user_aliases[short] = long
            self.bot.brain[from_who] = user_info
            self.bot.brain.sync()
            lines = [
                "Alias of `%s` to `%s` has been recorded." % (short, long),
            ]
        replier = self.message.reply_text
        replier("\n".join(lines), threaded=True, prefixed=False)
Exemplo n.º 4
0
class JenkinsRestartHandler(handler.TriggeredHandler):
    """Triggers the jenkins the bot is connected to, to restart."""

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jenkins restart', takes_args=True),
        ],
        'args': {
            'order': [
                'safe',
            ],
            'schema': Schema({
                Required("safe"): bool,
            }),
            'converters': {
                'safe': hu.strict_bool_from_string,
            },
            'help': {
                'safe': "perform a safe restart (letting active jobs finish)",
            },
            'defaults': {
                'safe': True,
            },
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
    }
    required_clients = ('jenkins', )

    def _run(self, safe):
        """Ask jenkins to restart (safely or forcefully), reporting results."""
        say = functools.partial(self.message.reply_text,
                                threaded=True, prefixed=False)
        if safe:
            say("Engaging *safe* jenkins restart, please wait...")
        else:
            say("Engaging *unsafe* (ie forceful)"
                " jenkins restart, please wait...")
        restarted = self.bot.clients.jenkins_client.perform_restart(safe=safe)
        if restarted:
            say("Restart acknowledged.")
        else:
            say("Restart failed.")
Exemplo n.º 5
0
class Handler(handler.TriggeredHandler):
    """Emit some message to some set of slack channels."""

    required_clients = ('slack',)
    requires_slack_sender = True
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('emit message', takes_args=True),
        ],
        'args': {
            'order': ['channels', 'message'],
            'schema': Schema({
                Required("channels"): All(scu.string_types(), Length(min=1)),
                Required("message"): All(scu.string_types(), Length(min=1)),
            }),
            'help': {
                'channels': ('comma separated list of channels'
                             ' to broadcast to'),
                'message': 'what to broadcast',
            },
        },
    }

    def _run(self, channels, message):
        """Resolve each requested channel, then broadcast the message."""
        sender = self.bot.slack_sender
        server = self.bot.clients.slack_client.server
        targets = []
        seen = set()
        for raw_name in channels.split(","):
            name = raw_name.strip()
            if not name or name in seen:
                continue
            found = server.channels.find(name)
            if found is None:
                raise RuntimeError("Could not find channel '%s'" % name)
            if found.id in seen:
                continue
            # Track both the requested name and the resolved id so that
            # different spellings of one channel only broadcast once.
            seen.add(name)
            seen.add(found.id)
            targets.append(found)
        for target in targets:
            sender.rtm_send(message, channel=target.id)
Exemplo n.º 6
0
class WatchHandler(JobWatcher):
    """Watches a jenkins jobs build."""

    # Seconds between successive build-info polls (used by JobWatcher).
    build_info_delay = 10

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'followers': [
            ConsoleFollower,
            AbortFollower,
        ],
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jenkins watch', takes_args=True),
        ],
        'args': {
            'order': [
                'job_name',
                'build',
            ],
            'schema': Schema({
                Required("job_name"): All(scu.string_types(), Length(min=1)),
                Required("build"): int,
            }),
            'converters': {
                'build': int,
            },
            'help': {
                'job_name': "job name to watch",
                "build": "build number to watch",
            },
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
    }
    required_clients = ('jenkins', )

    def _run(self, job_name, build):
        """Delegate to the inherited watcher using our jenkins client."""
        jenkins_client = self.bot.clients.jenkins_client
        return self._watch(job_name, build, jenkins_client)
Exemplo n.º 7
0
class RunOneHandler(handler.TriggeredHandler):
    """Explicitly runs one periodic jobs."""

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('periodics run one', takes_args=True),
        ],
        'args': {
            'order': [
                'job_id',
            ],
            'help': {
                'job_id': 'job id to run',
            },
            'schema': Schema({
                Required("job_id"): All(scu.string_types(), Length(min=1)),
            }),
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
    }

    def _run(self, job_id):
        """Force a single (non-paused) periodic job to run right away."""
        target = self.bot.scheduler.get_job(job_id)
        if target is None:
            raise excp.NotFound("Could not find job id '%s'" % job_id)
        # A job without a next run time is paused; it must be resumed
        # before it can be explicitly kicked.
        if target.next_run_time is None:
            raise RuntimeError("Paused job '%s' can not be explicitly"
                               " ran (please resume it first)" % job_id)
        target.modify(next_run_time=self.date_wrangler.get_now())
        self.bot.scheduler.wakeup()
        self.message.reply_text("Job `%s` has had"
                                " its next run time"
                                " updated to be now (hopefully it"
                                " runs soon)." % job_id,
                                threaded=True, prefixed=False)
Exemplo n.º 8
0
class ResumeHandler(handler.TriggeredHandler):
    """Resumes a previously paused periodic job."""

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('periodics resume', takes_args=True),
        ],
        'args': {
            'order': [
                'job_id',
            ],
            'help': {
                'job_id': 'job id to resume',
            },
            'schema': Schema({
                Required("job_id"): All(scu.string_types(), Length(min=1)),
            }),
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
    }

    def _run(self, job_id):
        """Resume the given job if (and only if) it is currently paused."""
        target = self.bot.scheduler.get_job(job_id)
        if target is None:
            raise excp.NotFound("Could not find job id '%s'" % job_id)
        # Paused jobs are the ones lacking a next run time.
        if target.next_run_time is not None:
            self.message.reply_text("Job `%s` is not paused (so it can"
                                    " not be resumed)." % job_id,
                                    threaded=True, prefixed=False)
            return
        target.resume()
        self.bot.scheduler.wakeup()
        self.message.reply_text("Job `%s` has"
                                " been resumed." % job_id,
                                threaded=True, prefixed=False)
Exemplo n.º 9
0
class Handler(handler.TriggeredHandler):
    """Say various trump phrases."""

    # Public quote API queried on each invocation.
    trump_url = 'https://api.whatdoestrumpthink.com/api/v1/quotes'
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('what would trump say', takes_args=False),
            trigger.Trigger('what would trump say?', takes_args=False),
            trigger.Trigger('trump say something', takes_args=False),
        ],
    }

    def _run(self):
        """Fetch the quote list and reply with one picked at random."""
        resp = requests.get(self.trump_url)
        resp.raise_for_status()
        quotes = resp.json()["messages"]["non_personalized"]
        self.message.reply_text(random.choice(quotes),
                                threaded=True, prefixed=False)
Exemplo n.º 10
0
class JenkinsInfoHandler(handler.TriggeredHandler):
    """Provides information on the jenkins the bot is connected to."""

    # Responds to "jenkins info" in targeted slack/telnet channels; the
    # three boolean flags (all defaulting to False) select which optional
    # sections (nodes, plugins, jobs) get fetched and reported.
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jenkins info', takes_args=True),
        ],
        'args': {
            'order': [
                'show_nodes',
                'show_plugins',
                'show_jobs',
            ],
            'schema':
            Schema({
                Required("show_nodes"): bool,
                Required("show_plugins"): bool,
                Required("show_jobs"): bool,
            }),
            'converters': {
                'show_nodes': hu.strict_bool_from_string,
                'show_plugins': hu.strict_bool_from_string,
                'show_jobs': hu.strict_bool_from_string,
            },
            'help': {
                'show_nodes': "retrieve node information",
                "show_plugins": "retrieve plugin information",
                "show_jobs": "retrieve job information",
            },
            'defaults': {
                'show_nodes': False,
                'show_plugins': False,
                'show_jobs': False,
            },
        },
    }
    # A connected jenkins client is required for this handler to run.
    required_clients = ('jenkins', )

    @staticmethod
    def _format_plugin(plugin):
        """Build a slack attachment describing a single jenkins plugin.

        Includes the plugin's short name, version, and its enabled and
        active flags as short (side-by-side) fields.
        """
        attachment = {
            'title':
            plugin.long_name,
            'title_link':
            plugin.url,
            'fields': [
                {
                    'title': 'Short Name',
                    'value': plugin.name,
                    'short': True,
                },
            ],
            'mrkdwn_in': [],
        }
        attachment['fields'].append({
            'title': 'Version',
            'value': str(plugin.version),
            'short': True,
        })
        attachment['fields'].append({
            'title': 'Enabled',
            'value': str(plugin.enabled),
            'short': True,
        })
        attachment['fields'].append({
            'title': 'Active',
            'value': str(plugin.active),
            'short': True,
        })
        return attachment

    @staticmethod
    def _format_job_folder(folder):
        """Build a minimal slack attachment for a jenkins job folder."""
        attachment = {
            'title': folder.name,
            'title_link': folder.url,
            'fields': [],
        }
        return attachment

    @staticmethod
    def _format_job(job):
        """Build a slack attachment for a jenkins job, with health data."""
        attachment = {
            'title': job.name,
            'title_link': job.url,
            'fields': [],
        }
        try:
            # Map the jenkins job "color" (its status) onto a slack
            # side-bar color when a mapping exists; otherwise no color.
            attachment['color'] = su.COLORS[job.color]
        except KeyError:
            pass
        health_report = job.get_health_report()
        if health_report is not None:
            attachment['fields'].append({
                'title': 'Health',
                'value': health_report.description,
                'short': False,
            })
            attachment['fields'].append({
                'title': 'Health Score',
                'value': str(health_report.score),
                'short': True,
            })
        return attachment

    @staticmethod
    def _format_node(node):
        """Build a slack attachment for a jenkins node (master or slave)."""
        attachment = {
            'fields': [],
            'mrkdwn_in': ['pretext'],
        }
        if node.master:
            attachment['pretext'] = "Master node"
        else:
            attachment['pretext'] = "Slave node"
        attachment['fields'].append({
            'title': 'Name',
            'value': node.name,
            'short': True,
        })
        attachment['fields'].append({
            'title': 'Dynamic',
            'value': str(node.dynamic),
            'short': True,
        })
        attachment['fields'].append({
            'title': 'Offline',
            'value': str(node.offline),
            'short': True,
        })
        if node.offline and node.offline_cause:
            attachment['fields'].append({
                'title': 'Offline Cause',
                'value': str(node.offline_cause),
                'short': False,
            })
        attachment['fields'].append({
            'title': 'Idle',
            'value': str(node.idle),
            'short': True,
        })
        attachment['fields'].append({
            'title': 'Launch Supported',
            'value': str(node.launch_supported),
            'short': True,
        })
        # Expose each node monitor as its own field, with the monitor
        # key prettified ("disk_space" -> "Disk Space") into a title.
        for monitor_name in sorted(node.monitors.keys()):
            val = node.monitors[monitor_name]
            tmp_monitor_name = monitor_name.replace("_", " ")
            tmp_monitor_name = tmp_monitor_name.title()
            attachment['fields'].append({
                'title': tmp_monitor_name,
                'value': str(val),
                'short': True,
            })
        if node.offline:
            attachment['color'] = su.COLORS.red
        return attachment

    def _run(self, show_nodes, show_plugins, show_jobs):
        """Reply with jenkins details, optionally nodes/plugins/jobs.

        Always reports the connection summary (version, session, user);
        each ``show_*`` flag then adds a further batch of attachments,
        announced by a progress message first.
        """
        jenkins_client = self.bot.clients.jenkins_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        replier("Fetching jenkins details, please wait...")
        replier_attachments = self.message.reply_attachments
        # Fall back to a placeholder when version/session are unavailable.
        jenkins_ver = jenkins_client.get_version()
        if not jenkins_ver:
            jenkins_ver = "??"
        jenkins_sess = jenkins_client.get_session()
        if not jenkins_sess:
            jenkins_sess = "??"
        attachments = [{
            'pretext': ("Connected to"
                        " <%s|jenkins>.") % jenkins_client.base_url,
            'mrkdwn_in': ['pretext'],
            'fields': [
                {
                    'title': "Version",
                    'value': str(jenkins_ver),
                    'short': True,
                },
                {
                    'title': "Session",
                    'value': str(jenkins_sess),
                    'short': True,
                },
                {
                    'title': "Connected As",
                    'value': jenkins_client.username,
                    'short': True,
                },
            ],
        }]
        replier_attachments(attachments=attachments,
                            log=LOG,
                            link_names=True,
                            as_user=True,
                            thread_ts=self.message.body.ts,
                            channel=self.message.body.channel,
                            unfurl_links=False)
        if show_nodes:
            replier("Fetching jenkins node details, please wait...")
            # Always put master node(s?) at the front...
            attachments = []
            tmp_nodes = jenkins_client.get_nodes()
            nodes = []
            nodes.extend(node for node in tmp_nodes if node.master)
            nodes.extend(node for node in tmp_nodes if not node.master)
            for node in nodes:
                attachments.append(self._format_node(node))
            replier_attachments(attachments=attachments,
                                log=LOG,
                                link_names=True,
                                as_user=True,
                                thread_ts=self.message.body.ts,
                                channel=self.message.body.channel,
                                unfurl_links=False)
        if show_plugins:
            replier("Fetching jenkins plugin details, please wait...")
            attachments = []
            for plugin in jenkins_client.get_plugins():
                attachments.append(self._format_plugin(plugin))
            replier_attachments(attachments=attachments,
                                log=LOG,
                                link_names=True,
                                as_user=True,
                                thread_ts=self.message.body.ts,
                                channel=self.message.body.channel,
                                unfurl_links=False)
        if show_jobs:
            replier("Fetching jenkins job details, please wait...")
            # Split folders from plain jobs; each kind is reported in
            # its own attachment batch below.
            jobs = []
            folders = []
            for thing in jenkins_client.iter_jobs(yield_folders=True,
                                                  expand_folders=False):
                if isinstance(thing, tj.JobFolder):
                    folders.append(thing)
                else:
                    jobs.append(thing)
            jobs = sorted(jobs, key=lambda job: job.name)
            attachments = []
            for job in jobs:
                attachments.append(self._format_job(job))
            replier_attachments(attachments=attachments,
                                log=LOG,
                                link_names=True,
                                as_user=True,
                                text="Found %s jenkins jobs." % len(jobs),
                                thread_ts=self.message.body.ts,
                                channel=self.message.body.channel,
                                unfurl_links=False)
            folders = sorted(folders, key=lambda folder: folder.name)
            attachments = []
            for folder in folders:
                attachments.append(self._format_job_folder(folder))
            replier_attachments(attachments=attachments,
                                log=LOG,
                                link_names=True,
                                as_user=True,
                                text="Found %s jenkins jobs folders." %
                                len(folders),
                                thread_ts=self.message.body.ts,
                                channel=self.message.body.channel,
                                unfurl_links=False)
Exemplo n.º 11
0
class JenkinsJobHealthHandler(handler.TriggeredHandler):
    """Provides job health for the jenkins the bot is connected to."""

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jenkins job-health', takes_args=True),
        ],
        'args': {
            'order': [
                'job_name',
            ],
            'help': {
                'job_name': 'job name to fetch (blank for all)',
            },
            'schema': Schema({
                Required("job_name"): scu.string_types(),
            }),
            'defaults': {
                'job_name': '',
            },
        },
    }
    # A connected jenkins client is required for this handler to run.
    required_clients = ('jenkins', )
    # Job names looked up inside each job folder (see _iter_jobs).
    folder_jobs = ['master']

    @classmethod
    def _iter_jobs(cls, jenkins_client, jobs, folders):
        """Yield (job, display_name) pairs sorted case-insensitively.

        Plain jobs are yielded as-is; for each folder, the job names in
        ``cls.folder_jobs`` are looked up inside it (when they exist)
        and yielded under a "<folder>/<job>" display name.
        """
        all_jobs = []
        for job in jobs:
            all_jobs.append((job, job.name))
        # These folders are typically our multibranch jobs, and we
        # care about how some job(s) under that are working out; so
        # find those job(s) if we can...
        for f in folders:
            for f_job_name in cls.folder_jobs:
                f_job_full_name = f.name + "/job/" + f_job_name
                f_job_short_name = f.name + "/" + f_job_name
                f_job = jenkins_client.get_job(f_job_full_name)
                if f_job is not None:
                    all_jobs.append((f_job, f_job_short_name))
        for job, job_name in sorted(all_jobs, key=lambda v: v[1].lower()):
            yield job, job_name

    @classmethod
    def insert_periodics(cls, bot, scheduler):
        """Register a periodic job that posts health reports to slack.

        Does nothing unless ``jenkins.health_report_period`` (a crontab
        expression) is configured and both a slack client and a slack
        sender are available on the bot.
        """
        try:
            health_report_period = bot.config.jenkins.health_report_period
        except AttributeError:
            # No period configured; periodic health reporting disabled.
            pass
        else:
            slack_client = bot.clients.get("slack_client")
            slack_sender = bot.slack_sender
            if slack_client is not None and slack_sender is not None:
                hr = peu.make_periodic_runner("jenkins health report",
                                              cls,
                                              health_report_period,
                                              channel=bot.config.admin_channel,
                                              log=LOG)
                hr.__module__ = __name__
                hr.__name__ = "run_check_jenkins_health"
                hr_trigger = cron.CronTrigger.from_crontab(
                    health_report_period, timezone=bot.config.tz)
                hr_name = reflection.get_callable_name(hr)
                hr_description = "Periodically analyzes jenkins job health."
                scheduler.add_job(
                    hr,
                    trigger=hr_trigger,
                    jobstore='memory',
                    name="\n".join([hr_name, hr_description]),
                    # A stable (hashed) id so re-registration replaces
                    # rather than duplicates the scheduled job.
                    id=utils.hash_pieces(
                        [health_report_period, hr_name, hr_description],
                        max_len=8),
                    args=(bot, slack_client, slack_sender),
                    coalesce=True)

    def _run(self, job_name=''):
        """Reply with a colored per-job health summary.

        When ``job_name`` is non-empty only that job is reported;
        otherwise every (non-disabled, built) job is listed.
        """
        jenkins_client = self.bot.clients.jenkins_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        replier_attachments = self.message.reply_attachments
        replier("Calculating jenkins job health, please wait...")
        # Split folders from plain jobs; _iter_jobs merges them back.
        jobs = []
        folders = []
        for thing in jenkins_client.iter_jobs(yield_folders=True,
                                              expand_folders=False):
            if isinstance(thing, tj.JobFolder):
                folders.append(thing)
            else:
                jobs.append(thing)
        job_lines = []
        job_colors = collections.defaultdict(int)
        for job, a_job_name in self._iter_jobs(jenkins_client, jobs, folders):
            if job_name and a_job_name != job_name:
                continue
            job_color = job.color
            # A "_anime" suffix marks a currently-building job; treat
            # it as its base color.
            if job_color.endswith("_anime"):
                job_color = job_color[:-len('_anime')]
            if job_color in ('notbuilt', 'disabled'):
                continue
            if job_color in ('green', 'blue', 'red', 'yellow'):
                # Slack doesn't seem to have a blue ball, so just
                # switch it...
                if job_color == 'blue':
                    job_color = 'green'
                pretty_job_color = ":%sball:" % job_color
            else:
                pretty_job_color = job_color
            job_colors[job_color] += 1
            job_lines.append(u"• <%s|%s> %s" %
                             (job.url, a_job_name, pretty_job_color))
        num_red = job_colors.get('red', 0)
        num_yellow = job_colors.get('yellow', 0)
        num_ok = job_colors.get("green", 0)
        attachment = {
            'pretext': 'Report for `%s`' % jenkins_client.base_url,
            'text': "\n".join(job_lines),
            'mrkdwn_in': ['text', 'pretext'],
        }
        # Side-bar color reflects the worst state seen: red beats
        # yellow, which beats green.
        if num_red:
            attachment['color'] = su.COLORS['red']
        if num_yellow and not num_red:
            attachment['color'] = su.COLORS['yellow']
        if not num_yellow and not num_red and num_ok:
            attachment['color'] = su.COLORS['green']
        replier_attachments(attachments=[attachment],
                            log=LOG,
                            link_names=True,
                            as_user=True,
                            text=' ',
                            thread_ts=self.message.body.ts,
                            channel=self.message.body.channel,
                            unfurl_links=True)
Exemplo n.º 12
0
class JenkinsCheckHandler(handler.TriggeredHandler):
    """Checks jobs in jenkins and ensures master branches are working."""

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            # This handler accepts no arguments (``_run`` takes none and
            # no 'args' spec is declared), so do not request argument
            # parsing; was previously (incorrectly) ``takes_args=True``.
            trigger.Trigger('jenkins check', takes_args=False),
        ],
    }
    required_clients = ('jenkins', )

    def _run(self):
        """Report the status of each job folder's master job.

        Scans all job folders, fetches the "<folder>/job/master" job for
        each (skipping folders without one) and posts one attachment per
        found job, colored by its status.
        """
        jenkins_client = self.bot.clients.jenkins_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        replier("Fetching jenkins details, please wait...")
        # Collect only the folder-style jobs (e.g. multibranch pipelines).
        job_folders = []
        for job in jenkins_client.iter_jobs(expand_folders=False,
                                            yield_folders=True):
            if isinstance(job, tj.JobFolder):
                job_folders.append(job)
        found = 0
        replier_attachments = self.message.reply_attachments
        for job_folder in sorted(job_folders, key=lambda f: f.name.lower()):
            # Bail out promptly if the bot is shutting down.
            if self.dead.is_set():
                raise excp.Dying
            master_job = jenkins_client.get_job("%s/job/master" %
                                                job_folder.name)
            if not master_job:
                continue
            attachment = {
                'pretext': "Job `%s`." % job_folder.name,
                'title': master_job.name,
                'title_link': master_job.url,
                'mrkdwn_in': ['pretext'],
                'fields': [],
            }
            try:
                attachment['color'] = su.COLORS[master_job.color]
            except KeyError:
                # Unknown status color: show it as a field instead of
                # a side-bar color.
                attachment['fields'].append({
                    'title': 'Color',
                    'value': master_job.color,
                    'short': True,
                })
            replier_attachments(attachments=[attachment],
                                log=LOG,
                                link_names=True,
                                as_user=True,
                                text=' ',
                                thread_ts=self.message.body.ts,
                                channel=self.message.body.channel,
                                unfurl_links=False)
            found += 1
        if not found:
            replier("No jobs to check found.")
        else:
            replier("%s jobs scanned. Have a nice day!" % found)
Exemplo n.º 13
0
class UnplannedHandler(handler.TriggeredHandler):
    """Creates a unplanned issue + associates it to an active sprint."""

    # Because the client library fetches things over and over
    # and things we know to be the same, aren't changing a lot/ever...
    #
    # Size of these was picked somewhat arbitrarily but should be fine...
    cache = munch.Munch({
        'projects': LRUCache(maxsize=100),
        'boards': LRUCache(maxsize=100),
    })
    required_clients = ('jira', )
    config_section = 'jira'
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jira unplanned', takes_args=True),
        ],
        'args': {
            'order': [
                'summary',
                'time_taken',
                'was_resolved',
                'project',
                'board',
            ],
            'converters': {
                'time_taken': _convert_time_taken,
                'was_resolved': hu.strict_bool_from_string,
            },
            'schema':
            Schema({
                Required("summary"): All(scu.string_types(), Length(min=1)),
                Required("project"): All(scu.string_types(), Length(min=1)),
                Required("board"): All(scu.string_types(), Length(min=1)),
                Required("time_taken"): int,
                Required("was_resolved"): bool,
            }),
            'help': {
                'summary':
                "short summary of the unplanned work",
                'board':
                'board to locate sprint to'
                ' drop newly created issue in (must exist)',
                'time_taken': ('time taken on unplanned'
                               ' work (ie 30 seconds, 5 minutes,'
                               ' 1 hour, 1 day...)'),
                'project':
                'project to create task in (must exist)',
                'was_resolved':
                'mark the newly created issue as resolved',
            },
            'defaults': {
                'project': 'CAA',
                'board': 'CAA board',
                'time_taken': "1 hour",
                "was_resolved": True,
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
    }

    @staticmethod
    def _find_and_cache(fetcher_func, match_func, cache_target, cache_key):
        """Page through ``fetcher_func`` results until ``match_func`` hits.

        Returns the first matching item (caching it under ``cache_key``
        when a key is given) or ``None`` when nothing matches. The
        fetcher is called with increasing ``start_at`` offsets until it
        returns an empty page.
        """
        if cache_key and cache_key in cache_target:
            return cache_target[cache_key]
        offset = 0
        result = None
        found = False
        while not found:
            items = fetcher_func(start_at=offset)
            if not items:
                break
            else:
                for item in items:
                    if match_func(item):
                        result = item
                        found = True
                        break
                if not found:
                    # Advance by exactly the page size; the prior
                    # ``+ 1`` here skipped one item between pages
                    # (startAt offsets are 0-based).
                    offset = offset + len(items)
        if found and cache_key:
            cache_target[cache_key] = result
        return result

    @classmethod
    def _find_project(cls, jac, project):
        """Find a jira project by name, key or id (case-insensitive)."""
        def match_func(p):
            return (p.name.lower() == project.lower()
                    or p.key.lower() == project.lower() or p.id == project)

        def fetcher_func(all_projects, start_at):
            # The full project list is fetched once; fake paging over it.
            return all_projects[start_at:]

        return cls._find_and_cache(
            functools.partial(fetcher_func, jac.projects()), match_func,
            cls.cache.projects, project)

    @classmethod
    def _find_board(cls, jac, board, type='scrum'):
        """Find a jira board by name or id (case-insensitive)."""
        def match_func(b):
            return (b.name.lower() == board.lower() or b.id == board)

        def fetcher_func(start_at):
            return jac.boards(type=type, startAt=start_at)

        return cls._find_and_cache(fetcher_func, match_func, cls.cache.boards,
                                   ":".join([board, type]))

    @classmethod
    def _find_sprint(cls, jac, board, board_name, ok_states):
        """Find the first sprint on ``board`` whose state is in ``ok_states``."""
        def match_func(s):
            return s.state.lower() in ok_states

        def fetcher_func(start_at):
            return jac.sprints(board.id, startAt=start_at)

        # We don't want to cache anything, since we expect sprints to
        # actually become active/inactive quite a bit...
        return cls._find_and_cache(fetcher_func, match_func, {}, None)

    @staticmethod
    def _create_issue(jac,
                      project,
                      secs_taken,
                      summary,
                      user_name,
                      channel_name='',
                      quick_link=None):
        """Create the unplanned-work task and return (issue, slack link)."""
        mins_taken = secs_taken / 60.0
        hours_taken = mins_taken / 60.0
        days_taken = hours_taken / 24.0
        time_taken_pieces = [
            "%0.2f days" % (days_taken),
            "%0.2f hours" % (hours_taken),
            "%0.2f minutes" % (mins_taken),
            "%s seconds" % (secs_taken),
        ]
        time_taken_text = " or ".join(time_taken_pieces)
        new_issue_description_lines = [
            ("User @%s spent %s doing"
             " unplanned work.") % (user_name, time_taken_text),
        ]
        if channel_name:
            new_issue_description_lines.extend([
                "",
                "In channel: #%s" % channel_name,
            ])
        if quick_link:
            new_issue_description_lines.extend([
                "",
                "Reference: %s" % quick_link,
            ])
        new_issue_fields = {
            'summary': summary,
            'issuetype': {
                'name': 'Task',
            },
            'components': [{
                'name': "Unplanned"
            }],
            'assignee': {
                'name': user_name,
            },
            'project': project.id,
            'description': "\n".join(new_issue_description_lines),
        }
        new_issue = jac.create_issue(fields=new_issue_fields)
        new_issue_link = "<%s|%s>" % (new_issue.permalink(), new_issue.key)
        return (new_issue, new_issue_link)

    def _run(self, summary, time_taken, was_resolved, project, board):
        """Create the issue, bind it to the active sprint, maybe resolve it."""
        # Load and validate stuff (before doing work...)
        jac = self.bot.clients.jira_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        # This one is used here because it appears the RTM one isn't
        # processing/sending links correctly (did it ever, but this one
        # does handle links right, so ya...)
        reply_attachments = functools.partial(
            self.message.reply_attachments,
            log=LOG,
            link_names=True,
            as_user=True,
            thread_ts=self.message.body.ts,
            channel=self.message.body.channel,
            unfurl_links=False)
        j_project = self._find_project(jac, project)
        if not j_project:
            raise excp.NotFound("Unable to find project '%s'" % (project))
        j_board = self._find_board(jac, board)
        if not j_board:
            raise excp.NotFound("Unable to find board '%s'" % (board))
        # Create it in that project...
        replier("Creating unplanned issue"
                " in project `%s`, please wait..." % (project))
        new_issue, new_issue_link = self._create_issue(
            jac,
            j_project,
            time_taken,
            summary,
            self.message.body.user_name,
            channel_name=self.message.body.get('channel_name'),
            quick_link=self.message.body.get('quick_link'))
        reply_attachments(attachments=[{
            'pretext': ("Created unplanned"
                        " issue %s.") % (new_issue_link),
            'mrkdwn_in': ['pretext'],
        }])
        # Find and bind it to currently active sprint (if any)...
        j_sprint = self._find_sprint(jac, j_board, board, ['active'])
        if j_sprint:
            reply_attachments(attachments=[{
                'pretext': ("Binding %s to active sprint `%s`"
                            " of board `%s`." %
                            (new_issue_link, j_sprint.name, board)),
                'mrkdwn_in': ['pretext'],
            }])
            jac.add_issues_to_sprint(j_sprint.id, [new_issue.key])
            reply_attachments(attachments=[{
                'pretext': ("Bound %s to active sprint `%s`"
                            " of board `%s`." %
                            (new_issue_link, j_sprint.name, board)),
                'mrkdwn_in': ['pretext'],
            }])
        else:
            replier("No active sprint found"
                    " in board `%s`, sprint binding skipped." % (board))
        # Mark it as done...
        if was_resolved:
            transition = None
            possible_transitions = set()
            for t in jac.transitions(new_issue.id):
                t_name = t.get('name', '')
                t_name = t_name.lower()
                if t_name in _RESOLVED_TRANSITIONS:
                    transition = t
                if t_name:
                    possible_transitions.add(t_name)
            if not transition:
                possible_transitions = sorted(possible_transitions)
                possible_transitions = " or ".join(
                    ["`%s`" % t.upper() for t in possible_transitions])
                ok_transitions = sorted(_RESOLVED_TRANSITIONS)
                ok_transitions = " or ".join(
                    ["`%s`" % t.upper() for t in ok_transitions])
                reply_attachments(attachments=[{
                    'pretext': ("Unable to resolve %s, could not find"
                                " issues %s"
                                " state transition!") %
                    (new_issue_link, ok_transitions),
                    'mrkdwn_in': ['pretext', 'text'],
                    "text": ("Allowable state"
                             " transitions: %s" % possible_transitions),
                }])
            else:
                reply_attachments(attachments=[{
                    'pretext': ("Transitioning %s issue to resolved, "
                                "please wait...") % (new_issue_link),
                    'mrkdwn_in': ['pretext'],
                }])
                jac.transition_issue(new_issue.id,
                                     transition['id'],
                                     comment="All done! kthxbye")
                replier("Transitioned.")
        replier = self.message.reply_text
        replier("Thanks for tracking your unplanned work!",
                prefixed=True,
                threaded=True)
Exemplo n.º 14
0
class Handler(handler.TriggeredHandler):
    """Triggers a workflow to downgrade/upgrade the version of this bot."""
    # Seconds to sleep between polls while waiting for the queued jenkins
    # build to leave the queue and actually start.
    wait_jenkins_queue_item = 0.1
    config_section = 'updater'
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('upgrade', takes_args=True),
            trigger.Trigger('update', takes_args=True),
            trigger.Trigger('upgrayedd', takes_args=True),
        ],
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
        'args': {
            'order': [
                'version',
            ],
            'help': {
                # NOTE(review): "lastest" is a typo (should be "latest") in
                # this user-facing help text; left untouched here since this
                # change only adds documentation.
                'version': ('version of padre container to deploy'
                            ' (must exist in artifactory), if'
                            ' not provided then the lastest will'
                            ' be found'),
            },
        }
    }
    required_clients = ('jenkins',)
    required_secrets = (
        'ci.artifactory.ro_account',
    )

    def _await_confirm(self, old_version, version, changelog_lines):
        """Post upgrade details and block until a human confirms.

        Shows old/new versions plus recent changelog entries, then waits
        (up to 300 seconds) for the confirmation follower to transition;
        raises ``excp.Cancelled`` if the confirmation was cancelled.
        """
        def _show_others_active():
            active_handlers = len(self.bot.active_handlers)
            return ("There are %s other active"
                    # Remove one since thats the upgrade handler itself...
                    " handlers.") % (max(0, active_handlers - 1))
        pretext_lines = [
            "Newer version `%s` found!" % version,
            "I am older version `%s`." % old_version,
        ]
        text_lines = []
        if changelog_lines:
            text_lines.append("Last `%s` changes:" % len(changelog_lines))
            text_lines.extend(changelog_lines)
        attachments = [{
            'pretext': "\n".join(pretext_lines),
            'mrkdwn_in': ['pretext', 'text'],
            "text": "\n".join(text_lines),
        }]
        self.message.reply_attachments(
            text="Good %s." % self.date_wrangler.get_when(),
            attachments=attachments,
            link_names=True, as_user=True,
            channel=self.message.body.channel,
            log=LOG, thread_ts=self.message.body.get("ts"))
        replier = functools.partial(self.message.reply_text,
                                    threaded=True, prefixed=False,
                                    thread_ts=self.message.body.ts)
        # Confirming oneself is allowed so an admin can approve their
        # own upgrade request.
        f = followers.ConfirmMe(confirms_what='upgrading',
                                confirm_self_ok=True,
                                check_func=_show_others_active)
        replier(f.generate_who_satisifies_message(self))
        self.wait_for_transition(follower=f, wait_timeout=300,
                                 wait_start_state='CONFIRMING')
        if self.state == 'CONFIRMED_CANCELLED':
            raise excp.Cancelled

    def _run(self, **kwargs):
        """Resolve the target version, confirm, then kick the jenkins job.

        If no version is provided, artifactory is scanned for the newest
        upgradeable version; raises ``excp.NotFound`` when the configured
        jenkins upgrade job does not exist.
        """
        replier = functools.partial(self.message.reply_text,
                                    threaded=True, prefixed=False)
        me = pkg_resources.get_distribution('padre')
        ro_account = self.bot.secrets.ci.artifactory.ro_account
        version = kwargs.get("version")
        version_provided = bool(version)
        project_url = self.bot.config.updater.project_url
        path = None
        if not version_provided:
            replier("Scanning artifactory, please wait...")
            newer_paths_it = uu.iter_updates(me.version,
                                             ro_account, project_url)
            # Newest candidate version wins.
            newer_paths = sorted(newer_paths_it, key=lambda v: v.version)
            if newer_paths:
                path = newer_paths[-1].path
                version = str(newer_paths[-1].version)
        if not version:
            replier("No potentially upgradeable versions"
                    " found under '%s'" % project_url)
            return
        if me.version == version:
            replier("Nothing to upgrade, version desired is equivalent"
                    " to active version.")
            return
        if path is None:
            # An explicit version was requested; verify it actually exists.
            tmp_path = uu.check_fetch_version(version, ro_account, project_url)
            path = tmp_path.path
        self._await_confirm(
            me.version, version, uu.extract_changelog(path))
        self.change_state("UPGRADING")
        jenkins_job = self.config.jenkins_job
        jenkins_client = self.bot.clients.jenkins_client
        job = jenkins_client.get_job(jenkins_job)
        if job is not None:
            replier(
                "Triggering upgrade to"
                " version `%s` by kicking job `%s`." % (version,
                                                        jenkins_job))
            qi = job.invoke(build_params={
                'image_tag': version,
                'bot': self.bot.name or "",
            })
            replier("Your upgrade to `%s` job"
                    " has been queued." % version)
            build = None
            # Poll until the queued item turns into a real build (or we
            # are asked to die).
            while build is None:
                if self.dead.is_set():
                    # Oh well, someone else killed us...
                    raise excp.Dying
                qi.poll()
                build = qi.get_build()
                if build is None:
                    self.dead.wait(self.wait_jenkins_queue_item)
            replier(
                "Your upgrade to `%s` job has"
                " started at %s. I am going into stealth/quiet"
                " mode until then (resurrection expected in %0.2f"
                " seconds), goodbye..." % (version, build.url,
                                           build.get_eta()))
            # Stop taking new work; the replacement bot will take over.
            self.bot.quiescing = True
            self.bot.scheduler.shutdown(wait=False)
        else:
            raise excp.NotFound(
                "Jenkins upgrade job '%s' was not"
                " found" % jenkins_job)
Exemplo n.º 15
0
class ListHandler(handler.TriggeredHandler):
    """Lists the members of a ldap group."""

    required_clients = ("ldap", "github")
    # Listings longer than this get uploaded as a gist instead of being
    # posted inline (to avoid flooding the channel).
    max_before_gist = 100
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('ldap list', takes_args=True),
        ],
        'args': {
            'order': [
                'group',
            ],
            'help': {
                'group': 'ldap group to list',
            },
            'schema':
            Schema({
                Required("group"): All(scu.string_types(), Length(min=1)),
            }),
        },
    }

    def _run(self, group):
        """Show the members of ``group`` (inline table or gist link)."""
        replier = self.message.reply_text
        ldap_client = self.bot.clients.ldap_client
        group_members = [
            ldap_utils.explode_member(member)
            for member in ldap_client.list_ldap_group(group)
        ]
        # Guard against members with a missing CN so sorting can not
        # blow up comparing None against strings.
        group_members = sorted(group_members,
                               key=lambda m: m.get("CN") or "")
        tbl_headers = ['CN', 'DC', 'OU']
        rows = []
        for member in group_members:
            row = []
            for k in tbl_headers:
                v = member.get(k)
                if isinstance(v, list):
                    v = ", ".join(v)
                row.append(v)
            rows.append(row)
        if len(group_members) <= self.max_before_gist:
            lines = [
                "```",
                tabulate.tabulate(rows, headers=tbl_headers),
                "```",
            ]
            replier("\n".join(lines), threaded=True, prefixed=False)
        else:
            github_client = self.bot.clients.github_client
            me = github_client.get_user()
            to_send = {}
            upload_what = [
                ('listing', tabulate.tabulate(rows, headers=tbl_headers)),
            ]
            for what_name, contents in upload_what:
                # Github has upper limit on postings to 1MB
                contents = _chop(contents, units.Mi)
                contents = contents.strip()
                name = what_name + ".txt"
                to_send[name] = github.InputFileContent(contents)
            if to_send:
                try:
                    # First arg True == secret (unlisted) gist.
                    gist = me.create_gist(True, to_send)
                except Exception:
                    LOG.warning(
                        "Failed uploading gist for listing"
                        " of '%s' ldap group", group, exc_info=True)
                    # Previously this failure was only logged, leaving the
                    # requester with no response at all; tell them too.
                    replier("Failed uploading gist of '%s' ldap"
                            " group listing, please try"
                            " again later." % group,
                            threaded=True, prefixed=False)
                else:
                    lines = [
                        "Gist url at: %s" % gist.html_url,
                    ]
                    replier("\n".join(lines), threaded=True, prefixed=False)
Exemplo n.º 16
0
class CalcSizeHandler(handler.TriggeredHandler):
    """Determines size of docker artifactory repositories."""

    config_section = 'artifactory'
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('artifactory calculate size', takes_args=True),
        ],
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
        'args': {
            'order': ['project'],
            'help': {
                'project': 'project to scan',
            },
            'schema': Schema({
                Required("project"): All(su.string_types(), Length(min=1)),
            }),
        },
    }
    required_secrets = (
        'ci.artifactory.ro_account',
    )
    required_configurations = ('base_url',)

    def _run(self, project):
        """Walk the project's artifactory tree and report its total size."""
        ro_account = self.bot.secrets.ci.artifactory.ro_account
        root = _find_path(self.config, project, ro_account)
        if not root:
            raise excp.NotFound("Could not find project '%s'" % project)

        replier = functools.partial(self.message.reply_text,
                                    threaded=True, prefixed=False)
        replier("Determining current size of `%s`, please"
                " wait..." % project)

        sizes = [root.stat().size]
        top_children = sorted(list(root.iterdir()), key=lambda p: p.name)
        if top_children:
            top_pbar = self.message.make_progress_bar(
                len(top_children),
                update_period=_calc_emit_every(top_children))
            for child in top_pbar.wrap_iter(top_children):
                if self.dead.is_set():
                    break
                sizes.append(child.stat().size)
                replier("Determining total size"
                        " of top-level child `%s`, please"
                        " wait..." % child.name)
                grand_children = list(child.iterdir())
                if not grand_children:
                    continue
                sub_pbar = self.message.make_progress_bar(
                    len(grand_children),
                    update_period=_calc_emit_every(grand_children))
                for grand_child in sub_pbar.wrap_iter(grand_children):
                    if self.dead.is_set():
                        break
                    try:
                        grand_child_size = _calc_docker_size(
                            grand_child, grand_child.stat().size)
                    except excp.NotFound:
                        # Not a docker layout; fall back to a deep walk.
                        grand_child_size = 0
                        for size in _iter_sizes_deep(grand_child):
                            if self.dead.is_set():
                                break
                            grand_child_size += size
                    sizes.append(grand_child_size)

        if self.dead.is_set():
            replier("Died during scanning, please"
                    " try again next time...")
        else:
            replier(
                "Size of `%s` is %s" % (project,
                                        utils.format_bytes(
                                            sum(sizes), quote=True)))
Exemplo n.º 17
0
class SyncHandler(handler.TriggeredHandler):
    """Syncs an upstream git repository into a downstream mirror.

    Optionally validates (and PRs updates to) a patch repository
    against the freshly cloned upstream before pushing branches/tags
    downstream.
    """
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('sync repo', True),
        ],
        'args': {
            'order': [
                'downstream_url',
                'upstream_url',
                'upstream_branch_refs',
                'upstream_tag_refs',
                'upstream_tags_as_branches_refs',
                'patch_repo_url',
                'patch_branch',
            ],
            'defaults': {
                'downstream_url': None,
                'upstream_url': None,
                'upstream_branch_refs': "master",
                'upstream_tag_refs': "",
                'upstream_tags_as_branches_refs': "",
                'patch_repo_url': "",
                'patch_branch': "master",
            },
            'converters': {},
            'schema':
            Schema({
                Required("downstream_url"):
                All(scu.string_types(), Length(min=1)),
                Required("upstream_url"):
                All(scu.string_types(), Length(min=1)),
                Optional("upstream_branch_refs"):
                All(scu.string_types(), Length(min=1)),
                Optional("upstream_tag_refs"):
                scu.string_types(),
                Optional("upstream_tags_as_branches_refs"):
                scu.string_types(),
                Optional("patch_repo_url"):
                scu.string_types(),
                Optional("patch_branch"):
                All(scu.string_types(), Length(min=1)),
            }),
            'help': {
                'downstream_url':
                "Which downstream git url to sync into?",
                'upstream_url':
                "Which upstream git url to sync from?",
                'upstream_branch_refs':
                "Which upstream branches to sync into downstream?",
                'upstream_tag_refs':
                "Which upstream tags to sync into downstream?",
                'upstream_tags_as_branches_refs':
                "Which upstream tags to sync into downstream as branches?",
            },
        },
    }
    required_clients = ('github', )
    periodic_config_path = "github.periodics"

    @staticmethod
    def _format_voluptuous_error(data,
                                 validation_error,
                                 max_sub_error_length=500):
        """Turn a voluptuous validation error into a readable sentence.

        Collects the offending field names and joins them with proper
        English ("x is", "Both of x and y are", "All of x, y, and z
        are").

        NOTE(review): ``max_sub_error_length`` is currently unused, and
        this helper is not referenced within ``_run`` — presumably it is
        used by periodic/validation code elsewhere; verify before
        removing.
        """
        errors = []
        if isinstance(validation_error, MultipleInvalid):
            errors.extend(
                sorted(sub_error.path[0]
                       for sub_error in validation_error.errors))
        else:
            errors.append(validation_error.path[0])

        errors = ['`{}`'.format(e) for e in errors]
        if len(errors) == 1:
            adj = ''
            vars = errors[0]
            verb = 'is'
        elif len(errors) == 2:
            adj = 'Both of '
            vars = ' and '.join(errors)
            verb = 'are'
        else:
            # Use a mutable bytearray so the final ", " separator can be
            # spliced into ", and " in place (Oxford-comma style).
            adj = 'All of '
            vars = bytearray(b', '.join(errors))
            last_comma = vars.rfind(', ')
            vars[last_comma:last_comma + 2] = b', and '
            verb = 'are'
            vars = vars.decode('utf-8')

        return 'Error: {adj}{vars} {verb} required.'.format(adj=adj,
                                                            vars=vars,
                                                            verb=verb)

    def _run(self, downstream_url, upstream_url, upstream_branch_refs,
             upstream_tag_refs, upstream_tags_as_branches_refs, patch_repo_url,
             patch_branch):
        """Clone upstream, check patch compatibility, push downstream.

        The three ``*_refs`` arguments arrive as comma separated strings
        and are normalized into lists below;
        ``upstream_tags_as_branches_refs`` entries are expected in
        ``tag:branch`` form.
        """
        # Normalize the comma separated branch list, dropping blanks.
        tmp_upstream_branch_refs = []
        for upstream_branch in upstream_branch_refs.split(","):
            upstream_branch = upstream_branch.strip()
            if upstream_branch:
                tmp_upstream_branch_refs.append(upstream_branch)
        upstream_branch_refs = tmp_upstream_branch_refs

        # Same normalization for tags.
        tmp_upstream_tags_refs = []
        for upstream_tag in upstream_tag_refs.split(","):
            upstream_tag = upstream_tag.strip()
            if upstream_tag:
                tmp_upstream_tags_refs.append(upstream_tag)
        upstream_tag_refs = tmp_upstream_tags_refs

        # And split "tag:branch" pairs into [tag, branch] lists.
        tmp_upstream_tags_as_branches_refs = []
        for upstream_tag_branch in upstream_tags_as_branches_refs.split(","):
            upstream_tag_branch = upstream_tag_branch.strip()
            if upstream_tag_branch:
                # NOTE(review): maxsplit=2 allows a third ":"-segment
                # that is silently ignored; entries without a ":" will
                # raise IndexError here — presumably callers always pass
                # well-formed pairs, verify.
                tmp_pieces = upstream_tag_branch.split(":", 2)
                tmp_tag = tmp_pieces[0]
                tmp_branch = tmp_pieces[1]
                tmp_upstream_tags_as_branches_refs.append(
                    [tmp_tag, tmp_branch])
        upstream_tags_as_branches_refs = tmp_upstream_tags_as_branches_refs

        # Derive a project name from the last url segment (the "or"
        # handles a trailing slash).
        project = upstream_url.split('/')
        project = project[-1] or project[-2]

        self.message.reply_text("Syncing repository for project `%s`..." %
                                project,
                                threaded=True,
                                prefixed=False)

        # Make temp dir for run
        tmp_dir_prefix = "github_sync_{}".format(project)
        with utils.make_tmp_dir(dir=self.bot.config.working_dir,
                                prefix=tmp_dir_prefix) as tmp_dir:
            # Clone the source repo
            try:
                source_repo = git.Repo.clone_from(
                    upstream_url, os.path.join(tmp_dir, 'source'))
                self.message.reply_text(
                    ":partyparrot: Successfully loaded repository `%s`." %
                    project,
                    threaded=True,
                    prefixed=False)
            except Exception:
                self.message.reply_text(
                    ":sadparrot: Failed to load repository `%s`." % project,
                    threaded=True,
                    prefixed=False)
                return

            # Now check patches, if we know what patch repo to use
            if patch_repo_url:
                self.message.reply_text(
                    "Checking patch compatibility for `%s` branch `%s`." %
                    (project, patch_branch),
                    threaded=True,
                    prefixed=False)

                # Clone the patch repo
                patch_repo = git.Repo.clone_from(
                    patch_repo_url, os.path.join(tmp_dir, 'patches'))
                # Remember the pre-validation head so we can detect any
                # auto-commit made by update-patches below.
                head_commit = patch_repo.head.commit.hexsha

                # Validate patches
                r = process_utils.run(
                    [
                        'update-patches', '--branch-override', patch_branch,
                        '--patch-repo', patch_repo.working_dir
                    ],
                    cwd=os.path.join(tmp_dir, "source")  # from sync() above
                )
                try:
                    r.raise_for_status()
                    self.message.reply_text(
                        ":gdhotdog: Patch compatibility check successful.",
                        threaded=True,
                        prefixed=False)
                except process_utils.ProcessExecutionError:
                    # Merge conflict: tell the requester and escalate to
                    # the admin channel, then bail out of the sync.
                    self.message.reply_text(
                        "Patch compatibility check failed. Please do a manual "
                        "rebase!",
                        threaded=True,
                        prefixed=False)
                    attachment = {
                        'text': (":warning:"
                                 " Patches are in merge conflict in the"
                                 " repository `%s`. Manual intervention"
                                 " is required!") % project,
                        'mrkdwn_in': ['text'],
                        'color':
                        su.COLORS.purple,
                    }
                    self.message.reply_attachments(
                        attachments=[attachment],
                        log=LOG,
                        as_user=True,
                        text=' ',
                        channel=self.config.admin_channel,
                        unfurl_links=True)
                    return

                # If we made an auto-commit, PR it
                if patch_repo.head.commit.hexsha == head_commit:
                    self.message.reply_text("No patch updates detected.",
                                            threaded=True,
                                            prefixed=False)
                else:
                    # Push the auto-commit to a uniquely named branch and
                    # open a pull request against master.
                    new_branch = '{project}_{short_hash}'.format(
                        project=project,
                        short_hash=patch_repo.head.commit.hexsha[:8])
                    new_refspec = 'HEAD:{branch}'.format(branch=new_branch)
                    self.message.reply_text(
                        "Pushing patch updates to branch `{branch}`.".format(
                            branch=new_branch),
                        threaded=True,
                        prefixed=False)
                    patch_repo.remote().push(refspec=new_refspec)
                    # Turn the git url into the "org/repo" form the
                    # github api expects.
                    patch_repo_name = patch_repo_url.split(":")[-1]
                    patch_repo_name = patch_repo_name.split('.git')[0]
                    gh_repo = self.bot.clients.github_client.get_repo(
                        patch_repo_name)
                    # First commit-message line becomes the PR title.
                    title, body = patch_repo.head.commit.message.split('\n', 1)
                    self.message.reply_text("Creating pull request...",
                                            threaded=True,
                                            prefixed=False)
                    pr = gh_repo.create_pull(title=title,
                                             body=body.strip(),
                                             base="master",
                                             head=new_branch)
                    self.message.reply_text(
                        ":gunter: Pull request created: {url}".format(
                            url=pr.html_url),
                        threaded=True,
                        prefixed=False)

            # Finish syncing the repo by pushing the new state
            self.message.reply_text("Pushing upstream state downstream...",
                                    threaded=True,
                                    prefixed=False)
            source_repo.heads.master.checkout()
            source_repo.remote().fetch()
            retval = git_utils.sync_push(
                working_folder=tmp_dir,
                target=downstream_url,
                push_tags=upstream_tag_refs,
                push_branches=upstream_branch_refs,
                push_tags_to_branches=upstream_tags_as_branches_refs)
            if retval == 0:
                self.message.reply_text(
                    ":partyparrot: Successfully pushed repository `%s`." %
                    project,
                    threaded=True,
                    prefixed=False)
            else:
                self.message.reply_text(
                    ":sadparrot: Failed to push repository `%s`." % project,
                    threaded=True,
                    prefixed=False)
                return
            self.message.reply_text(":beers: Done.",
                                    threaded=True,
                                    prefixed=False)
Exemplo n.º 18
0
class PRScanReportHandler(handler.TriggeredHandler):
    """Scans configured github orgs & repos and produces a PR report."""

    config_section = 'github'
    required_clients = ('github', )
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('pull request report', takes_args=True),
        ],
        'args': {
            'order': ['org_repo'],
            'converters': {
                'org_repo': _extract_orgs_repos,
            },
            'schema': Schema({
                Required("org_repo"): All(list, Length(min=1)),
            }),
            'help': {
                'org_repo': ("organization and/or organization/repo to"
                             " interrogate (comma separated if many)"),
            },
        },
    }

    @staticmethod
    def _determine_age(pull, now):
        """Classify a pull request's staleness from its creation time."""
        if pull.created_at > now:
            return 'From the _future_'
        age_secs = (now - pull.created_at).total_seconds()
        # (upper bound in seconds, label) pairs checked in ascending order.
        age_buckets = [
            (12 * 3600, "*Very* _fresh_"),
            (86400, "_Fresh_"),
            (3 * 86400, "*Mostly* _fresh_"),
            (7 * 86400, "_Molding_"),
            (14 * 86400, "*Heavily* _molding_"),
            (21 * 86400, "_Rotting_"),
            (28 * 86400, "*Heavily* _rotting_"),
        ]
        for upper_bound, label in age_buckets:
            if age_secs <= upper_bound:
                return label
        return "*Unidentifiable*"

    @classmethod
    def _iter_pull_attachments(cls, now, pulls):
        """Yield one slack attachment dict per pull, oldest first."""
        for entry in sorted(pulls, key=lambda e: e.pull.created_at):
            pull = entry.pull
            yield {
                'pretext': u"• %s PR created by <%s|%s>" % (
                    cls._determine_age(pull, now),
                    pull.user.html_url, pull.user.name),
                'mrkdwn_in': ["pretext"],
                'text': pull.title,
                'title': ("%s/%s - PR #%s") % (entry.org_name,
                                               entry.repo_name,
                                               pull.number),
                'title_link': pull.html_url,
                'fields': _format_pr_fields(pull, now=now),
            }

    def _run(self, org_repo):
        """Scan the requested orgs/repos and report their open PRs."""
        gh = self.bot.clients.github_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)

        # TODO: The pygithub api seems to drop timestamps TZ (seems to always
        # be in zulu time, so due to this we can't use our timezone
        # specific comparison)... file a bug sometime.
        now = self.date_wrangler.get_now()
        now = now.astimezone(pytz.UTC).replace(tzinfo=None)

        seen_full_orgs = set()
        for org_name, repo_name in org_repo:
            # Skip specific repos of an org we already scanned in full.
            if org_name in seen_full_orgs:
                continue
            if not repo_name:
                seen_full_orgs.add(org_name)
                replier("Scanning `%s` organization,"
                        " please wait..." % (org_name))
                gh_org = gh.get_organization(org_name)
                scan_targets = [(repo.name, repo)
                                for repo in gh_org.get_repos('public')]
                emit_repo = True
            else:
                replier("Scanning `%s/%s` organization"
                        " repository, please wait..." % (org_name, repo_name))
                gh_org = gh.get_organization(org_name)
                scan_targets = [
                    (repo_name, gh_org.get_repo(repo_name)),
                ]
                emit_repo = False
            for repo_name, gh_repo in scan_targets:
                if emit_repo:
                    replier("Scanning `%s/%s` organization"
                            " repository, please wait..." %
                            (org_name, repo_name))
                gh_repo_pulls = [
                    munch.Munch({
                        'pull': gh_pull,
                        'repo': gh_repo,
                        'org': gh_org,
                        'org_name': org_name,
                        'repo_name': repo_name,
                    })
                    for gh_pull in gh_repo.get_pulls(state='open')
                ]
                replier('Discovered `%s` open'
                        ' pull requests.' % (len(gh_repo_pulls)))
                attachments = list(
                    self._iter_pull_attachments(now, gh_repo_pulls))
                self.message.reply_attachments(
                    channel=self.message.body.channel,
                    text=None,
                    link_names=True,
                    as_user=True,
                    unfurl_links=True,
                    attachments=attachments,
                    log=LOG,
                    thread_ts=self.message.body.ts)
Exemplo n.º 19
0
class Handler(handler.TriggeredHandler):
    """Shows you what this bot changelog is (or another bot version)."""

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('changelog', takes_args=True),
        ],
        'args': {
            'order': ['version'],
            'defaults': {
                'version': None,
            },
            'help': {
                'version': "optional version to get changelog of",
            },
        },
    }
    required_secrets = (
        'ci.artifactory.ro_account',
    )
    requires_slack_sender = True
    required_configurations = (
        'updater.project_url',
    )

    def _run(self, version=None):
        """Fetch and post the changelog for the given (or own) version."""
        if not version:
            # Default to the currently running bot's own version.
            version = pkg_resources.get_distribution('padre').version
        project_url = self.bot.config.updater.project_url
        ro_account = self.bot.secrets.ci.artifactory.ro_account
        try:
            tmp_path = uu.check_fetch_version(version, ro_account,
                                              project_url)
        except excp.NotFound:
            self.message.reply_text(
                "No version `%s` found. Does it exist?" % version,
                threaded=True, prefixed=False)
            return
        changelog_lines = uu.extract_changelog(tmp_path.path)
        if changelog_lines:
            pretext = "Here is the changelog for `%s`." % version
            text_lines = ["Changes captured:"] + list(changelog_lines)
        else:
            pretext = "No changelog found for `%s`." % version
            text_lines = []
        attachments = [{
            'pretext': pretext,
            'mrkdwn_in': ['pretext', 'text'],
            "text": "\n".join(text_lines),
        }]
        self.bot.slack_sender.post_send(
            text="Good %s." % self.date_wrangler.get_when(),
            attachments=attachments,
            link_names=True, as_user=True,
            channel=self.message.body.channel,
            log=LOG, thread_ts=self.message.body.get("ts"))
Exemplo n.º 20
0
class Handler(handler.TriggeredHandler):
    """Shows you what this bot can do."""

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('help', takes_args=True),
        ],
        'args': {
            'order': ['trigger'],
            'converters': {},
            'help': {
                'trigger': "optional target trigger to get help on",
            },
        },
    }

    @staticmethod
    def _describe(h, bot):
        """Build a slack attachment describing a single handler's help."""
        title, how_to = h.get_help(bot)
        return {
            'pretext': u"• %s" % title,
            'text': "\n".join(how_to),
            'mrkdwn_in': ['pretext', 'text'],
        }

    def _find_handler(self, trigger_text):
        """Find the first helpful handler with a trigger matching the text."""
        for h in self.bot.handlers:
            if not h.has_help():
                continue
            for h_trigger in h.handles_what.get("triggers", []):
                matched, _args = h_trigger.match(trigger_text)
                if matched:
                    return h
        return None

    def _run(self, trigger=None):
        """Reply with help for one trigger, or for every known handler."""
        attachments = []
        if trigger:
            target_h = self._find_handler(trigger)
            if target_h is None:
                self.message.reply_text(
                    "Sorry I do not know of any"
                    " trigger `%s` (pick another?)" % trigger,
                    threaded=True,
                    prefixed=False)
            else:
                attachments.append(self._describe(target_h, self.bot))
        else:
            attachments = [self._describe(h, self.bot)
                           for h in self.bot.handlers if h.has_help()]
        if attachments:
            self.message.reply_attachments(attachments=attachments,
                                           log=LOG,
                                           link_names=True,
                                           as_user=True,
                                           thread_ts=self.message.body.ts,
                                           channel=self.message.body.channel,
                                           unfurl_links=False,
                                           simulate_typing=False)
Exemplo n.º 21
0
class DumpHandler(handler.TriggeredHandler):
    """Dumps what is in the bots brain about a given user."""

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('brain dump', takes_args=True),
        ],
        'args': {
            'order': ['user'],
            'defaults': {
                'user': "",
            },
            'help': {
                'user': ("user id or user name"
                         " to dump (leave empty to dump calling user)"),
            },
        },
    }

    def _fetch_known(self, real_user):
        """Return whatever the brain currently remembers about a user."""
        remembered = {}
        with self.bot.locks.brain:
            memory_key = "user:%s" % real_user.id
            try:
                remembered['aliases'] = dict(self.bot.brain[memory_key])
            except KeyError:
                # Nothing stored for this user (yet).
                pass
        return remembered

    def _run(self, user=''):
        """Reply with a masked yaml dump of what is known about a user."""
        real_user = _resolve_user(self.bot, self.message, user=user)
        replier = self.message.reply_text
        if not real_user.id:
            if user:
                message = "Unable to find user `%s`." % user
            else:
                message = "Unable to find calling user."
            replier(message, threaded=True, prefixed=False)
            return
        # Pick the nicest available label for the user in replies.
        user = user or real_user.name or real_user.id
        user_memory = self._fetch_known(real_user)
        if not user_memory:
            replier("Nothing internalized about `%s`." % user,
                    threaded=True,
                    prefixed=True)
            return
        dumped = utils.prettify_yaml(
            utils.mask_dict_password(user_memory),
            explicit_end=False,
            explicit_start=False)
        reply_lines = [
            "I have internalized the following about `%s`:" % user,
            "```",
            dumped.strip(),
            "```",
        ]
        replier("\n".join(reply_lines), threaded=True, prefixed=False)
Exemplo n.º 22
0
class Handler(handler.TriggeredHandler):
    """Searches various elastic indexes for things in (log)message fields."""

    # (index pattern, query template) pairs to scan in order.
    index_and_query = [
        ('dcr.compute_*-*', 'message:"%(thing)s"'),
        ('dcr.openstack_logstash-*', 'logmessage:"%(thing)s"'),
    ]

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('elastic search logs', takes_args=True),
        ],
        'authorizer': auth.message_from_channels(
            ['openstack', 'team-openstack-eng']),
        'args': {
            'order': ['thing'],
            'help': {
                'thing': 'thing to find logs for',
            },
            'schema': Schema({
                Required("thing"): All(su.string_types(), Length(min=1)),
            }),
        },
    }
    required_clients = (
        'github',
        'elastic',
    )

    @staticmethod
    def _chop(fh, max_am):
        """Trim buffer contents to at most max_am, noting how much was cut."""
        left, contents = utils.read_backwards_up_to(fh, max_am)
        if left:
            contents = ("%s more..." % left) + " " + contents
        return contents

    def _run(self, thing):
        """Scan the configured indexes and gist any matching log hits."""
        github_client = self.bot.clients.github_client
        elastic_client = self.bot.clients.elastic_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        replier("Initiating scan for `%s`." % thing)
        to_send = {}
        for index, query_tpl in self.index_and_query:
            query = query_tpl % {'thing': thing}
            replier("Scanning index `%s` using query `%s`." % (index, query))
            search = (e_dsl.Search(using=elastic_client)
                      .query("query_string", query=query)
                      .sort("-@timestamp")
                      .index(index))
            hit_buf = six.StringIO()
            for hit_num, hit in enumerate(search.scan(), 1):
                header = "Hit %s" % hit_num
                hit_buf.write(header)
                hit_buf.write("\n")
                hit_buf.write("-" * len(header))
                hit_buf.write("\n")
                hit_buf.write(_format_hit(hit))
                hit_buf.write("\n")
            # Github has upper limit on postings to 1MB
            contents = self._chop(hit_buf, units.Mi)
            if contents:
                # Because github...
                gist_name = re.sub(r"\.|\-|\*|_", "", index) + ".txt"
                to_send[gist_name] = ghe.InputFileContent(contents)
        if not to_send:
            replier("No scan results found.")
            return
        replier("Uploading %s scan results to gist." % len(to_send))
        me = github_client.get_user()
        gist = me.create_gist(True, to_send)
        replier("Gist url at: %s" % gist.html_url)
Exemplo n.º 23
0
class PruneHandler(handler.TriggeredHandler):
    """Prunes a docker artifactory repositories.

    Scans a project's repo, picks the oldest non-frozen sub-children that
    push the total size over the requested target, asks the caller to
    confirm, and then (if confirmed) deletes them bottom-up.
    """

    config_section = 'artifactory'
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('artifactory prune', takes_args=True),
        ],
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
        'args': {
            'order': ['project', 'target_size'],
            'help': {
                'project': 'project to scan',
                'target_size': 'target size to prune project repo to',
            },
            'schema': Schema({
                Required("project"): All(su.string_types(), Length(min=1)),
                Required("target_size"): All(int, Range(min=0)),
            }),
            'converters': {
                'target_size': functools.partial(strutils.string_to_bytes,
                                                 return_int=True,
                                                 # Because artifactory
                                                 # is using the SI
                                                 # system... arg...
                                                 unit_system='SI'),
            },
        },
    }
    required_secrets = (
        'ci.artifactory.ro_account',
        'ci.artifactory.push_account',
    )
    required_configurations = ('base_url',)

    def _do_prune(self, prune_what):
        """Delete the given prune candidates, each via a post-order walk.

        Returns a ``(dirs_pruned, files_pruned, was_finished)`` tuple;
        ``was_finished`` is False when the handler was told to die
        between candidates (i.e. a partial prune happened).
        """
        dirs_pruned = 0
        files_pruned = 0
        was_finished = True
        pbar = self.message.make_progress_bar(
            len(prune_what), update_period=_calc_emit_every(prune_what))
        for child in pbar.wrap_iter(prune_what):
            # Only bail out between whole candidates, never inside one.
            if self.dead.is_set():
                was_finished = False
                break
            # Iterative post-order traversal: a directory is pushed back
            # (marked visited) so its contents get removed before the
            # directory itself is rmdir'ed.
            stack = collections.deque()
            stack.append((child.path, False))
            while stack:
                # NOTE: we do not check dead.is_set() here which might
                # be ok, but is done so that we don't delete a sub child
                # half-way (which if done may leave any docker images
                # half-way-working... ie missing components/layers...
                # which would be bad).
                p, p_visited = stack.pop()
                p_is_dir = p.is_dir()
                if p_is_dir and not p_visited:
                    stack.append((p, True))
                    stack.extend((c_p, False) for c_p in p.iterdir())
                elif p_is_dir and p_visited:
                    p.rmdir()
                    dirs_pruned += 1
                else:
                    p.unlink()
                    files_pruned += 1
        return (dirs_pruned, files_pruned, was_finished)

    def _do_scan(self, replier, path, target_size):
        """Scan ``path`` and select which sub-children should be pruned.

        Gathers every second-level child of ``path``, totals their sizes,
        then returns the oldest non-frozen ones (oldest first) whose
        removal would bring the cumulative size under ``target_size``.

        Raises ``excp.Dying`` if the handler is asked to die mid-scan.
        """
        root_child_paths = list(path.iterdir())
        all_sub_children = []
        replier("Finding all sub-children of"
                " %s top-level children." % len(root_child_paths))

        if root_child_paths:
            pbar = self.message.make_progress_bar(
                len(root_child_paths),
                update_period=_calc_emit_every(root_child_paths))
            for child_path in pbar.wrap_iter(root_child_paths):
                if self.dead.is_set():
                    raise excp.Dying
                replier("Scanning top-level"
                        " child `%s`, please wait..." % child_path.name)
                sub_child_paths = list(child_path.iterdir())
                if sub_child_paths:
                    rc_pbar = self.message.make_progress_bar(
                        len(sub_child_paths),
                        update_period=_calc_emit_every(sub_child_paths))
                    for sub_child_path in rc_pbar.wrap_iter(sub_child_paths):
                        if self.dead.is_set():
                            raise excp.Dying
                        # NOTE(review): stat() is invoked twice per path
                        # below; if each call is a remote artifactory
                        # request, hoisting it into a local would halve
                        # the calls -- confirm before changing.
                        all_sub_children.append(munch.Munch({
                            'path': sub_child_path,
                            'frozen': _is_frozen(sub_child_path),
                            'ctime': sub_child_path.stat().ctime,
                            'size': sub_child_path.stat().size,
                            'parent': child_path,
                        }))

        # Oldest first (ascending creation time).
        all_sub_children = sorted(all_sub_children, key=lambda p: p.ctime)
        num_childs_frozen = sum(int(sc.frozen) for sc in all_sub_children)
        replier("Determining total sizes"
                " of %s sub-children"
                " (%s are frozen)." % (len(all_sub_children),
                                       num_childs_frozen))
        if all_sub_children:
            pbar = self.message.make_progress_bar(
                len(all_sub_children),
                update_period=_calc_emit_every(all_sub_children))
            for sub_child in pbar.wrap_iter(all_sub_children):
                if self.dead.is_set():
                    raise excp.Dying
                try:
                    # Fast path: derive the size from docker metadata.
                    total_size = _calc_docker_size(sub_child.path,
                                                   sub_child.size)
                except excp.NotFound:
                    # Slow path: walk everything under the path and sum.
                    total_size = 0
                    for size in _iter_sizes_deep(sub_child.path):
                        if self.dead.is_set():
                            raise excp.Dying
                        total_size += size
                sub_child.total_size = total_size

        # Walk newest -> oldest accumulating sizes; once the running total
        # passes the target, everything older (and not frozen) is marked
        # for pruning. The final reverse restores oldest-first order.
        accum_size = 0
        prune_what = []
        for sub_child in reversed(all_sub_children):
            if sub_child.frozen:
                continue
            accum_size += sub_child.total_size
            if accum_size >= target_size:
                prune_what.append(sub_child)
        prune_what.reverse()
        return prune_what

    def _format_child(self, child):
        """Turn a prune candidate into a slack attachment dict."""
        try:
            child_pretext = "%s/%s" % (child.parent.name, child.path.name)
        except AttributeError:
            # Fall back to just the path name when no parent is available.
            child_pretext = "%s" % child.path.name
        attachment = {
            'pretext': child_pretext,
            'mrkdwn_in': [],
            'footer': "Artifactory",
            'footer_icon': ART_ICON,
        }
        tot_size = utils.format_bytes(child.total_size)
        attachment['fields'] = [
            {
                'title': 'Size',
                'value': tot_size,
                'short': utils.is_short(tot_size),
            },
            {
                "title": "Created on",
                "value": _format_dt(child.ctime),
                "short": True,
            },
        ]
        return attachment

    def _run(self, project, target_size):
        """Scan a project repo, confirm with the caller, then prune it."""
        push_account = self.bot.secrets.ci.artifactory.push_account
        path = _find_path(self.config, project, push_account)
        if not path:
            raise excp.NotFound("Could not find project '%s'" % project)
        replier = functools.partial(self.message.reply_text,
                                    threaded=True, prefixed=False)
        replier("Scanning `%s`, please wait..." % project)
        try:
            prune_what = self._do_scan(replier, path, target_size)
        except excp.Dying:
            replier("Died during scanning, please try"
                    " again next time...")
            return
        if not prune_what:
            replier("Nothing to prune found.")
            return
        # Show the candidates and require an explicit confirmation before
        # doing anything destructive.
        self.message.reply_attachments(
            attachments=list(self._format_child(c) for c in prune_what),
            log=LOG, link_names=True, as_user=True,
            thread_ts=self.message.body.ts,
            channel=self.message.body.channel)
        replier("Please confirm the pruning of"
                " %s paths." % len(prune_what))
        f = followers.ConfirmMe(confirms_what='pruning')
        replier(f.generate_who_satisifies_message(self))
        self.wait_for_transition(wait_timeout=300, follower=f,
                                 wait_start_state='CONFIRMING')
        if self.state != 'CONFIRMED_CANCELLED':
            self.change_state("PRUNING")
            replier("Initiating prune of %s paths." % len(prune_what))
            dirs_pruned, files_pruned, done = self._do_prune(prune_what)
            replier("Pruned %s directories and"
                    " %s files." % (dirs_pruned, files_pruned))
            if not done:
                replier("This was a partial prune, since I died"
                        " during pruning, please try"
                        " again next time...")
        else:
            replier("Pruning cancelled.")
Exemplo n.º 24
0
class DescribeServerHandler(Searcher, handler.TriggeredHandler):
    """Finds a virtual machine and describes it.

    This is on purpose eerily similar to OSC `server show` command.
    """

    requires_topo_loader = True
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('openstack server show', takes_args=True),
        ],
        'args': {
            'order': [
                'server',
                'only_private',
                'cloud',
            ],
            'help': {
                'server': 'server (name or id)',
                'only_private': ('only search the private clouds'
                                 ' and skip the public clouds'),
                'cloud': ("filter to only specific cloud (empty"
                          " searches all clouds)"),
            },
            'converters': {
                'only_private': hu.strict_bool_from_string,
            },
            'schema': Schema({
                Required("server"): All(su.string_types(), Length(min=1)),
                Required("only_private"): bool,
                Required("cloud"): su.string_types(),
            }),
            'defaults': {
                'only_private': True,
                'cloud': '',
            },
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud_viewers'),
    }

    def _run(self, server, only_private=True, cloud=''):
        """Search clouds for the given server and emit its details."""
        # Pick the search filter based on what the argument looks like;
        # uuid/ip lookups should be unique across clouds, names are not.
        if uuidutils.is_uuid_like(server):
            # Find by UUID
            filters = {'uuid': server}
            target_search = True
        elif netutils.is_valid_ip(server):
            # Find by IP address
            # Note: Much more expensive. Calling when exactly like an IP.
            filters = {'ip': _force_exact(server)}
            target_search = True
        else:
            # Find by name (across all clouds)
            filters = {'name': _force_exact(server)}
            target_search = False  # Name could exist in multiple clouds
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        servers, searched_clouds, _found_clouds = self._search(
            server,
            filters,
            target_search=target_search,
            only_private=only_private,
            cloud=cloud,
            replier=replier)
        if not servers:
            replier("Sorry I could not find `%s` in %s clouds,"
                    " try another?" % (server, searched_clouds))
            return
        self._emit_servers(servers)
        replier("Found %s possible matches, hopefully one of"
                " them was what you were looking for..." % len(servers))
Exemplo n.º 25
0
class ConsoleHandler(handler.TriggeredHandler):
    """Gets a jenkins jobs console log."""

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jenkins console', takes_args=True),
        ],
        'args': {
            'order': [
                'job_name',
                'build',
                'lines',
            ],
            'schema': Schema({
                Required("job_name"): All(scu.string_types(), Length(min=1)),
                Required("build"): int,
                Required("lines"): int,
            }),
            'converters': {
                'build': int,
                'lines': int,
            },
            'defaults': {
                'lines': CONSOLE_LINES,
            },
            'help': {
                'job_name': "job name to fetch",
                "build": "build identifier to fetch",
                "lines": ("maximum number of lines from the"
                          " console to respond"
                          " with (negative for no limit)"),
            },
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
    }
    required_clients = ('jenkins', )

    def _run(self, job_name, build, lines):
        """Fetch and reply with a (possibly truncated) build console log."""
        replier = functools.partial(self.message.reply_text,
                                    threaded=True, prefixed=False)
        replier("Fetching job `%s` build `%s`"
                " console, please wait..." % (job_name, build))
        job = self.bot.clients.jenkins_client.get_job(job_name)
        if job is None:
            replier("Job `%s` was not found!" % job_name)
            return
        build_num = build
        build = job.get_build(build_num)
        if build is None:
            replier("Job `%s` build `%s` was"
                    " not found!" % (job_name, build_num))
            return
        console_out = _format_build_console(build.get_console(),
                                            line_limit=lines)
        replier(console_out)
Exemplo n.º 26
0
class NotifyOwnersOfServersOnHypervisor(Searcher, handler.TriggeredHandler):
    """Notify some owners of VMs on a hypervisor about something."""

    # Topology information must be loaded before this handler runs.
    requires_topo_loader = True
    # The ecm client performs the actual owner notification delivery.
    required_clients = ('ecm', )
    # A human must confirm before any notifications are sent (see _run).
    confirms_action = 'notification'
    confirms_what = 'something'
    # Subject/body templates are rendered from this template subdirectory.
    template_subdir = 'maintenance'
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('openstack hypervisor notify-vm-owners',
                            takes_args=True),
        ],
        'args': {
            'order': [
                'hypervisor',
                'template',
                'what',
                'description',
                'when',
                'only_private',
                'cloud',
                # Various ecm passthroughs...
                'test_mode',
                'notify_slack',
                'notify_email',
            ],
            'help': {
                'hypervisor':
                'hypervisor to find vms on',
                'template':
                "notification template to use",
                'what':
                'one word for what is about to happen',
                'when': ("when the event is going to happen"
                         " in iso8601 format (if not"
                         " provided then the current time is used)"),
                'description':
                'multiple words for what is about to happen',
                'only_private': ('only search the private clouds'
                                 ' and skip the public clouds'),
                'cloud': ("filter to only specific cloud (empty"
                          " searches all clouds)"),
                # Various ecm passthroughs...
                'test_mode':
                'ecm notification api test mode passthrough',
                'notify_slack':
                'send notification via slack',
                'notify_email':
                'send notification via email',
            },
            # This will be filled in during setup_class call (since it
            # needs semi-dynamic information from the bot configuration).
            'converters': {},
            'schema':
            Schema({
                Required("hypervisor"):
                All(su.string_types(), Length(min=1)),
                Required("only_private"):
                bool,
                Required("cloud"):
                su.string_types(),
                Required("what"):
                All(su.string_types(), Length(min=1)),
                Required("description"):
                All(su.string_types(), Length(min=1)),
                Required("template"):
                All(su.string_types(), Length(min=1)),
                Required("when"):
                Any(None, datetime.datetime),
                # Various ecm passthroughs...
                Required("test_mode"):
                bool,
                Required("notify_email"):
                bool,
                Required("notify_slack"):
                bool,
            }),
            'defaults': {
                'only_private': True,
                'cloud': '',
                'when': None,
                # Various ecm passthroughs...
                'test_mode': False,
                'notify_slack': True,
                'notify_email': True,
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
    }

    @classmethod
    def setup_class(cls, bot):
        """Install argument converters that need bot configuration.

        The ``when`` converter depends on the bot's configured timezone,
        so it can not be declared statically in ``handles_what``.
        """
        tz = bot.config.tz
        cls.handles_what['args']['converters'].update({
            'only_private':
            hu.strict_bool_from_string,
            'when':
            functools.partial(_convert_dt, pytz.timezone(tz)),
            # Various ecm passthroughs...
            'test_mode':
            hu.strict_bool_from_string,
            'notify_slack':
            hu.strict_bool_from_string,
            'notify_email':
            hu.strict_bool_from_string,
        })

    def _build_template(self,
                        servers,
                        hypervisor,
                        template,
                        what,
                        when,
                        description,
                        test_mode=False):
        """Render the notification subject and body for the given servers.

        Servers without a ``metadata.owning_group`` attribute are left out
        of the rendered server list. Returns a ``(subject, body)`` tuple
        with the subject already stripped of surrounding whitespace.
        """
        tmp_servers = []
        for s in servers:
            s_owner = None
            try:
                s_owner = s.metadata.owning_group
            except AttributeError:
                pass
            if s_owner:
                # Present a smaller view of which servers are here (for now).
                tmp_servers.append(
                    munch.Munch({
                        'id': s.id,
                        'owner': s_owner,
                        'name': s.name,
                    }))
        subject = self.render_template('hv_subject', {'what': what.title()})
        subject = subject.strip()
        body = self.render_template(
            template, {
                'hypervisor': hypervisor,
                'vms': tmp_servers,
                'what': what,
                'description': description,
                'when': when,
                'subject': subject,
                'test_mode': test_mode,
            })
        return subject, body

    def _run(self,
             hypervisor,
             template,
             what,
             description,
             when=None,
             only_private=True,
             cloud='',
             test_mode=False,
             notify_email=True,
             notify_slack=True):
        """Find VMs on a hypervisor and (after confirmation) notify owners.

        Flow: validate the template exists, search the clouds for servers
        hosted on ``hypervisor``, preview the rendered notification in the
        channel, wait for a human confirmation, then hand delivery off to
        the ecm client (unless the confirmation was cancelled).
        """
        ecm = self.bot.clients.ecm_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        if when is None:
            when = self.date_wrangler.get_now()
        if not self.template_exists(template):
            replier("Template `%s` does not exist. Try again." % template)
        else:
            servers, searched_clouds, _found_clouds = self._search(
                hypervisor, {'host': hypervisor},
                target_search=True,
                only_private=only_private,
                cloud=cloud,
                replier=replier)
            if servers:
                self._emit_servers(servers)
                subject, body = self._build_template(servers,
                                                     hypervisor,
                                                     template,
                                                     what,
                                                     when,
                                                     description,
                                                     test_mode=test_mode)
                # Show exactly what would be sent so the confirming human
                # can review the rendered subject/body before approving.
                attachment = {
                    'pretext':
                    ("Found %s servers hosted on hypervisor `%s`, please"
                     " confirm that you wish to notify owners"
                     " of these servers using bundled template"
                     " `%s`." % (len(servers), hypervisor, template)),
                    'text':
                    "\n".join([
                        "_Subject:_ `%s`" % subject,
                        "_Body:_",
                        '```',
                        body,
                        '```',
                    ]),
                    'mrkdwn_in': ["text", 'pretext'],
                }
                self.message.reply_attachments(
                    attachments=[attachment],
                    log=LOG,
                    link_names=True,
                    as_user=True,
                    text=' ',
                    thread_ts=self.message.body.ts,
                    channel=self.message.body.channel,
                    unfurl_links=False)
                # Block (up to 300 seconds) until someone confirms or
                # cancels the pending notification.
                f = followers.ConfirmMe(confirms_what='notification')
                replier(f.generate_who_satisifies_message(self))
                self.wait_for_transition(wait_timeout=300,
                                         wait_start_state='CONFIRMING',
                                         follower=f)
                if self.state != 'CONFIRMED_CANCELLED':
                    self.change_state("SPAMMING")
                    admin_owning_group = self.config.get('admin_owning_group')
                    sent, _unknowns, targets = ecm.notify_server_owners(
                        servers,
                        subject,
                        body,
                        test_mode=test_mode,
                        notify_email=notify_email,
                        notify_slack=notify_slack,
                        admin_owning_group=admin_owning_group)
                    if sent:
                        replier("Notification spam"
                                " sent (via slack and/or email) to %s"
                                " groups." % (len(targets)))
                    else:
                        replier("Spam not sent (either no targets found"
                                " or no requested spam mechanisms"
                                " provided).")
                else:
                    replier("Notification cancelled.")
            else:
                replier("Sorry I could not find `%s` in %s clouds,"
                        " try another?" % (hypervisor, searched_clouds))
# Exemplo n.º 27
class Handler(handler.TriggeredHandler):
    """Get stock information."""

    stock_url = 'https://www.alphavantage.co/query'

    # NOTE: If more than 100 symbols are included, the API will
    # return quotes for the first 100 symbols.
    #
    # In order to fix that just split into 100 size chunks...
    max_per_call = 100

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('stock', takes_args=True),
        ],
        'args': {
            'order': ['symbols'],
            'converters': {},
            'schema': Schema({
                Required("symbols"): All(scu.string_types(), Length(min=1)),
            }),
            'help': {
                'symbols': 'symbol(s) to lookup (comma separated)',
            },
            'defaults': {
                'symbols': 'gddy',
            },
        },
    }

    def _run(self, **kwargs):
        """Fetch price/volume quotes for the requested symbols and reply.

        ``symbols`` is a comma separated string; duplicates (case
        insensitive) are dropped while preserving first-seen order, and
        lookups are batched to respect the API's per-call symbol limit.
        """
        symbols = kwargs.get('symbols', "")
        # Normalize: split, strip, upper-case and de-duplicate while
        # keeping the order symbols first appeared in.
        uniq_symbols = []
        seen_symbols = set()
        for s in symbols.split(","):
            tmp_s = s.strip().upper()
            if tmp_s and tmp_s not in seen_symbols:
                uniq_symbols.append(tmp_s)
                seen_symbols.add(tmp_s)
        headers = ["Symbol", "Price", "Volume"]
        rows = []
        for batch in utils.iter_chunks(uniq_symbols, self.max_per_call):
            # FIX: previously this used ``urllib.urlencode`` which only
            # exists on python 2 (it moved to ``urllib.parse`` in python
            # 3); letting requests encode the query string works on both.
            resp = requests.get(self.stock_url, params={
                'function': 'BATCH_STOCK_QUOTES',
                'symbols': ",".join(batch),
                'datatype': 'csv',
                'apikey': self.config.stock.apikey,
            })
            resp.raise_for_status()
            for row in csv.DictReader(
                    six.StringIO(resp.content.decode('utf-8'))):
                rows.append([
                    row['symbol'],
                    row['price'],
                    row['volume'],
                ])
        lines = [
            "```",
            tabulate.tabulate(rows, headers=headers),
            "```",
        ]
        replier = self.message.reply_text
        replier("\n".join(lines), threaded=True, prefixed=False)
# Exemplo n.º 28
class ListServersOnHypervisor(Searcher, handler.TriggeredHandler):
    """Lists virtual machines on a hypervisor."""

    # Topology information must be loaded before this handler runs.
    requires_topo_loader = True
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('openstack hypervisor list-vms', takes_args=True),
        ],
        'args': {
            'order': ['hypervisor', 'only_private', 'cloud'],
            'help': {
                'hypervisor': 'hypervisor to list vms on',
                'only_private': ('only search the private clouds'
                                 ' and skip the public clouds'),
                'cloud': ("filter to only specific cloud (empty"
                          " searches all clouds)"),
            },
            'converters': {
                'only_private': hu.strict_bool_from_string,
            },
            'schema': Schema({
                Required("hypervisor"): All(su.string_types(), Length(min=1)),
                Required("only_private"): bool,
                Required("cloud"): su.string_types(),
            }),
            'defaults': {
                'only_private': True,
                'cloud': '',
            },
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud_viewers'),
    }

    def _run(self, hypervisor, only_private=True, cloud=''):
        """Search the clouds for servers on a hypervisor and report them."""
        reply = functools.partial(self.message.reply_text,
                                  threaded=True,
                                  prefixed=False)
        servers, searched_clouds, _found_clouds = self._search(
            hypervisor, {'host': hypervisor},
            target_search=True,
            only_private=only_private,
            cloud=cloud,
            replier=reply)
        if not servers:
            reply("Sorry I could not find `%s` in %s clouds,"
                  " try another?" % (hypervisor, searched_clouds))
            return
        self._emit_servers(servers)
        reply("Found %s possible matches, hopefully one of"
              " them was what you were looking for..." % len(servers))
# Exemplo n.º 29
def _build_handler_from_jenkins(jenkins_client,
                                job_name,
                                restricted_ldap_groups=None,
                                description=None,
                                cmd_suffix='',
                                cmd_prefix=''):
    """Dynamically build a handler class for triggering a jenkins job.

    Inspects the job's parameter definitions and translates them into bot
    argument metadata (order, converters, defaults, help), then creates a
    ``JobHandler`` subclass wired up with that metadata.

    Returns a ``(type_name, type, type_dct)`` tuple, or
    ``(None, None, None)`` when no job with ``job_name`` exists.

    Raises ``RuntimeError`` for jenkins parameter types that have no known
    python translation.
    """
    job = jenkins_client.get_job(job_name)
    if job is None:
        return None, None, None
    handles_what = {
        'args': {},
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'followers': [
            ConsoleFollower,
            AbortFollower,
        ],
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
    }
    # Trigger phrase is the job name with separators spelled as spaces,
    # optionally wrapped with a prefix/suffix.
    cleaned_job_name = job_name.replace("-", " ").replace("_", " ")
    trigger_text = cleaned_job_name.lower()
    if cmd_suffix:
        trigger_text += " " + cmd_suffix
    if cmd_prefix:
        trigger_text = cmd_prefix + " " + trigger_text

    # Translate each unique jenkins parameter definition into the
    # equivalent bot argument definition (first definition wins).
    param_defs = collections.OrderedDict()
    for param in job.get_params():
        param_name = param['name']
        if param_name in param_defs:
            continue
        param_def = {}
        param_type = param['type']
        param_extra_description = ''
        if param_type in (
                'StringParameterDefinition',
                # TODO(harlowja): can we do validation?
                'ValidatingStringParameterDefinition'):
            param_def['type'] = str
        elif param_type == 'BooleanParameterDefinition':
            param_def['type'] = bool
            param_def['converter'] = hu.strict_bool_from_string
        elif param_type == 'ChoiceParameterDefinition':
            param_def['type'] = str
            choices = sorted(p.strip() for p in param['choices'] if p.strip())
            param_def['converter'] = functools.partial(utils.only_one_of,
                                                       choices)
            param_extra_description = "one of [%s]" % (", ".join(choices))
        else:
            raise RuntimeError("Unknown how to translate jenkins job '%s'"
                               " param '%s' type '%s' into a"
                               " python type: %s" %
                               (job_name, param_name, param_type, param))
        if 'defaultParameterValue' in param:
            param_def['default'] = param['defaultParameterValue']['value']
        if 'description' in param:
            param_description = param['description']
            if param_extra_description:
                # Do some cleanup on the existing description before
                # we mess with it (so that it formats nicer).
                param_description = param_description.strip()
                param_description = param_description.rstrip(".")
                param_description += " " + param_extra_description
            param_def['help'] = param_description
        elif param_extra_description:
            param_def['help'] = param_extra_description
        param_defs[param_name] = param_def

    # Flatten the per-parameter definitions into the parallel structures
    # that handles_what['args'] expects.
    args_converters = {}
    args_order = []
    args_defaults = {}
    args_help = {}
    for param_name, param_def in param_defs.items():
        args_order.append(param_name)
        if 'converter' in param_def:
            args_converters[param_name] = param_def['converter']
        if 'default' in param_def:
            args_defaults[param_name] = param_def['default']
        if 'help' in param_def:
            args_help[param_name] = param_def['help']

    handles_what['triggers'] = [
        trigger.Trigger(trigger_text, takes_args=bool(args_order)),
    ]
    handles_what['args']['help'] = args_help
    handles_what['args']['defaults'] = args_defaults
    handles_what['args']['converters'] = args_converters
    handles_what['args']['order'] = args_order

    if not description:
        description = "Initiates a %s build." % job_name

    job_cls_dct = {
        'handles_what': handles_what,
        'job_name': job_name,
        '__doc__': description,
        '__module__': __name__,
    }
    # Build a CamelCase class name, e.g. "build-foo bar" ->
    # "BuildFooBarJobHandler" (str() keeps the name native on python 2).
    job_type_name = job_name.replace("-", "_")
    job_type_name = job_type_name.replace(" ", "_")
    job_type_name = job_type_name.replace("\t", "_")
    job_type_name = str("%sJobHandler" % "".join(
        p.strip().title() for p in job_type_name.split("_")))
    job_cls = type(job_type_name, (JobHandler, ), job_cls_dct)
    return (job_type_name, job_cls, job_cls_dct)
# Exemplo n.º 30
class Unfurler(handler.Handler):
    """Expands jira issue references in messages into rich attachments.

    Watches broadcast channels for text matching ``<PROJECT>-<number>``
    patterns (built from the periodically cached jira project list) and
    replies with one attachment per referenced issue.
    """
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.BROADCAST),
    }
    # Feature toggle: disabled unless the config turns "unfurl" on.
    config_on_off = ("unfurl", False)
    required_clients = ('jira', )
    config_section = 'jira'
    # (projects, compiled per-project regex matchers) — populated by the
    # periodic refresh job registered in insert_periodics.
    cache = (None, None)
    # NOTE(review): not referenced in this chunk — presumably seconds
    # between cache refreshes; verify against the scheduling code.
    cache_refresh = 600

    @classmethod
    def handles(cls, message, channel, config):
        """Return a match carrying found issue keys, or None to pass.

        Matching requires: right channel, right message kind, a populated
        project cache, and at least one project-key pattern found in the
        message text (optionally restricted via ``unfurl_projects``).
        """
        channel_matcher = cls.handles_what['channel_matcher']
        if not channel_matcher(channel):
            return None
        message_matcher = cls.handles_what['message_matcher']
        if (not message_matcher(message, cls, only_to_me=False) or
                # Skip threaded entries...
                message.body.thread_ts):
            return None
        # Until the periodic refresh has run there is nothing to match.
        projects, projects_matchers = cls.cache
        if not projects:
            return None
        unfurl_projects = config.get("unfurl_projects")
        matches = {}
        for p, p_matcher in compat_zip(projects, projects_matchers):
            if (not p_matcher or (unfurl_projects is not None
                                  and p.key not in unfurl_projects)):
                continue
            p_matches = p_matcher.findall(message.body.text_no_links)
            if p_matches:
                matches[p.key] = set(p_matches)
        if not matches:
            return None
        else:
            return handler.ExplicitHandlerMatch({'matches': matches})

    @classmethod
    def insert_periodics(cls, bot, scheduler):
        """Register a cron job that refreshes the cached jira projects."""
        def refresh_projects(jira_client):
            """Periodic loads and caches jira projects."""
            try:
                projects = jira_client.projects()
            except jira.JIRAError:
                # NOTE(review): logger.warning is the modern spelling of
                # the deprecated warn alias.
                LOG.warn("Failed fetching jira projects", exc_info=True)
            else:
                # Build one "<KEY>-<digits>" regex per project; projects
                # without a key get None (skipped by handles()).
                projects_matchers = []
                for p in projects:
                    p_key = p.key
                    if not p_key:
                        p_matcher = None
                    else:
                        p_matcher = re.compile(re.escape(p_key) + r"[-]\d+")
                    projects_matchers.append(p_matcher)
                cls.cache = (projects, projects_matchers)

        try:
            jira_client = bot.clients.jira_client
        except AttributeError:
            # No jira client configured; nothing to schedule.
            pass
        else:
            refresh_projects_name = reflection.get_callable_name(
                refresh_projects)
            refresh_projects_description = refresh_projects.__doc__
            scheduler.add_job(
                refresh_projects,
                trigger=cron.CronTrigger.from_crontab("*/10 * * * *",
                                                      timezone=bot.config.tz),
                args=(jira_client, ),
                jobstore='memory',
                name="\n".join(
                    [refresh_projects_name, refresh_projects_description]),
                # Run right when scheduler starts up...
                next_run_time=bot.date_wrangler.get_now(),
                id=utils.hash_pieces(
                    [refresh_projects_name, refresh_projects_description],
                    max_len=8),
                coalesce=True)

    def _run(self, matches):
        """Fetch each matched issue from jira and reply with attachments."""
        jac = self.bot.clients.jira_client
        attachments = []
        for p_key in sorted(matches.keys()):
            for raw_issue in sorted(matches[p_key]):
                try:
                    issue = jac.issue(raw_issue)
                except jira.JIRAError:
                    # Best-effort: silently skip issues that can not be
                    # fetched.
                    pass
                else:
                    attachments.append(_convert_issue_to_attachment(issue))
        if attachments:
            self.message.reply_attachments(channel=self.message.body.channel,
                                           text=None,
                                           link_names=True,
                                           as_user=True,
                                           unfurl_links=True,
                                           attachments=attachments,
                                           log=LOG,
                                           thread_ts=self.message.body.ts)