Example No. 1
class Handler(handler.TriggeredHandler):
    """Causes the bot to turn itself off/shutdown."""

    ack_prefix = 'Shutdown acknowledged.'
    ack_messages = [
        "Goodbye!",
        "I am out of here.",
        "I am so out of here.",
        "Live long and prosper.",
        "Peace out y'all!",
    ]
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('shutdown', takes_args=False),
        ],
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
    }

    def _run(self, **kwargs):
        ack_msg = self.ack_prefix
        ack_msg += " "
        ack_msg += random.choice(self.ack_messages)
        replier = self.message.reply_text
        replier(ack_msg, threaded=True, prefixed=False)
        if not self.bot.dead.is_set():
            self.bot.dead.set()
Example No. 2
class Handler(handler.TriggeredHandler):
    """Causes the bot to restart itself."""

    ack_prefix = 'Restart acknowledged.'
    ack_messages = [
        "Be back in a bit!",
    ]
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('restart', takes_args=False),
        ],
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
    }

    def _run(self, **kwargs):
        ack_msg = self.ack_prefix
        ack_msg += " "
        ack_msg += random.choice(self.ack_messages)
        replier = self.message.reply_text
        replier(ack_msg, threaded=True, prefixed=False)
        if not self.bot.dead.is_set():
            self.bot.dead.set(self.bot.dead.RESTART)
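Example No. 2 differs from Example No. 1 only in that it passes a reason into bot.dead.set() and references a RESTART constant, which suggests bot.dead is a small threading.Event variant that can carry why the bot is dying. A minimal sketch of such an event, assuming roughly that shape (the real class is not shown in these examples):

import threading


class DyingEvent(threading.Event):
    """Illustrative only: an event that also records *why* it was set."""

    RESTART = 'RESTART'

    def __init__(self):
        super(DyingEvent, self).__init__()
        self.reason = None

    def set(self, reason=None):
        self.reason = reason
        super(DyingEvent, self).set()


# The handlers above then just do:
#   if not bot.dead.is_set():
#       bot.dead.set()                    # plain shutdown
#       bot.dead.set(bot.dead.RESTART)    # shutdown followed by a restart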
Example No. 3
class RunAllHandler(handler.TriggeredHandler):
    """Explicitly runs all periodic jobs."""

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('periodics run all', takes_args=True),
        ],
        'args': {
            'order': [
                'skip_paused',
            ],
            'help': {
                'skip_paused': ('skip over paused jobs (ie do not'
                                ' unpause them)'),
            },
            'defaults': {
                'skip_paused': True,
            },
            'converters': {
                'skip_paused': hu.strict_bool_from_string,
            },
            'schema': Schema({
                Required("skip_paused"): bool,
            }),
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
    }

    def _run(self, skip_paused):
        kicked = 0
        seen_jobs = set()
        skipped = 0
        for job in self.bot.scheduler.get_jobs():
            if job.id in seen_jobs:
                continue
            seen_jobs.add(job.id)
            if skip_paused and job.next_run_time is None:
                skipped += 1
                continue
            job.modify(next_run_time=self.date_wrangler.get_now())
            kicked += 1
        if kicked:
            self.bot.scheduler.wakeup()
        text = ("Kicked %s jobs"
                " and skipped %s jobs.") % (kicked, skipped)
        self.message.reply_text(text, threaded=True, prefixed=False)
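The scheduler API used here (get_jobs(), job.modify(next_run_time=...), wakeup(), and next_run_time being None for paused jobs) matches APScheduler, so assuming that library, the same "kick a job now" trick looks like this as a standalone sketch:

import datetime
import time

from apscheduler.schedulers.background import BackgroundScheduler


def tick():
    print("tick")


scheduler = BackgroundScheduler()
scheduler.add_job(tick, 'interval', minutes=30, id='demo-job')
scheduler.start()

# Force an immediate run: move the job's next run time to "now" and wake
# the scheduler so it notices (the same trick RunAllHandler uses above).
job = scheduler.get_job('demo-job')
if job is not None and job.next_run_time is not None:   # None == paused
    job.modify(next_run_time=datetime.datetime.now(job.next_run_time.tzinfo))
    scheduler.wakeup()

time.sleep(1)          # give the background thread a moment to fire
scheduler.shutdown()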
Example No. 4
    def test_user_in_ldap_groups(self):
        fake_ldap = mock.MagicMock()
        fake_ldap.is_allowed.return_value = True

        bot = common.make_bot()
        bot.config.my_group = 'abc'
        bot.clients.ldap_client = fake_ldap
        message = mock.MagicMock()
        message.body = munch.Munch()
        message.body.user_id = 'joe'
        message.body.user_name = 'joe'

        a = auth.user_in_ldap_groups('my_group')
        a(bot, message)
        fake_ldap.is_allowed.assert_called()
Example No. 5
    def test_user_in_ldap_groups_bad(self):
        fake_ldap = mock.MagicMock()
        fake_ldap.is_allowed.return_value = False

        bot = common.make_bot()
        bot.config.my_group = 'abc'
        bot.clients.ldap_client = fake_ldap
        message = mock.MagicMock()
        message.body = munch.Munch()
        message.body.user_id = 'joe'
        message.body.user_name = 'joe'

        a = auth.user_in_ldap_groups('my_group')
        self.assertRaises(excp.NotAuthorized, a, bot, message)
        fake_ldap.is_allowed.assert_called()
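Taken together, Examples No. 4 and No. 5 pin down the contract of auth.user_in_ldap_groups: it returns a callable taking (bot, message), resolves the group argument through the bot config, asks the LDAP client whether the user is allowed, and raises excp.NotAuthorized when they are not. A minimal sketch of an authorizer with that shape (the is_allowed() arguments and the config lookup are assumptions based only on these tests; the real padre helper may differ):

def user_in_ldap_groups(config_key):
    """Illustrative sketch only; not the project's actual implementation."""

    def _authorizer(bot, message):
        # The tests resolve the group through the bot config,
        # eg 'my_group' -> bot.config.my_group == 'abc'.
        groups = getattr(bot.config, config_key)
        user = message.body.user_name
        # is_allowed() arguments are a guess; the tests only assert that it
        # was called and react to its boolean result.
        if not bot.clients.ldap_client.is_allowed(user, groups):
            raise excp.NotAuthorized(
                "User '%s' is not in ldap group(s) '%s'" % (user, groups))

    return _authorizer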
Example No. 6
class JenkinsRestartHandler(handler.TriggeredHandler):
    """Triggers the jenkins the bot is connected to, to restart."""

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jenkins restart', takes_args=True),
        ],
        'args': {
            'order': [
                'safe',
            ],
            'schema': Schema({
                Required("safe"): bool,
            }),
            'converters': {
                'safe': hu.strict_bool_from_string,
            },
            'help': {
                'safe': "perform a safe restart (letting active jobs finish)",
            },
            'defaults': {
                'safe': True,
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
    }
    required_clients = ('jenkins', )

    def _run(self, safe):
        jenkins_client = self.bot.clients.jenkins_client
        replier = self.message.reply_text
        replier = functools.partial(replier, threaded=True, prefixed=False)
        if safe:
            replier("Engaging *safe* jenkins restart, please wait...")
        else:
            replier("Engaging *unsafe* (ie forceful)"
                    " jenkins restart, please wait...")
        if jenkins_client.perform_restart(safe=safe):
            replier("Restart acknowledged.")
        else:
            replier("Restart failed.")
Example No. 7
class WatchHandler(JobWatcher):
    """Watches a jenkins jobs build."""

    build_info_delay = 10

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'followers': [
            ConsoleFollower,
            AbortFollower,
        ],
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jenkins watch', takes_args=True),
        ],
        'args': {
            'order': [
                'job_name',
                'build',
            ],
            'schema':
            Schema({
                Required("job_name"): All(scu.string_types(), Length(min=1)),
                Required("build"): int,
            }),
            'converters': {
                'build': int,
            },
            'help': {
                'job_name': "job name to watch",
                "build": "build number to watch",
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
    }
    required_clients = ('jenkins', )

    def _run(self, job_name, build):
        clients = self.bot.clients
        return self._watch(job_name, build, clients.jenkins_client)
Example No. 8
class RunOneHandler(handler.TriggeredHandler):
    """Explicitly runs one periodic jobs."""

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('periodics run one', takes_args=True),
        ],
        'args': {
            'order': [
                'job_id',
            ],
            'help': {
                'job_id': 'job id to run',
            },
            'schema': Schema({
                Required("job_id"): All(scu.string_types(), Length(min=1)),
            }),
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
    }

    def _run(self, job_id):
        job = self.bot.scheduler.get_job(job_id)
        if job is None:
            raise excp.NotFound("Could not find job id '%s'" % job_id)
        elif job.next_run_time is None:
            raise RuntimeError("Paused job '%s' can not be explicitly"
                               " ran (please resume it first)" % job_id)
        else:
            job.modify(next_run_time=self.date_wrangler.get_now())
            self.bot.scheduler.wakeup()
            self.message.reply_text("Job `%s` has had"
                                    " its next run time"
                                    " updated to be now (hopefully it"
                                    " runs soon)." % job_id,
                                    threaded=True, prefixed=False)
Example No. 9
class ResumeHandler(handler.TriggeredHandler):
    """Resumes a previously paused periodic job."""

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('periodics resume', takes_args=True),
        ],
        'args': {
            'order': [
                'job_id',
            ],
            'help': {
                'job_id': 'job id to resume',
            },
            'schema': Schema({
                Required("job_id"): All(scu.string_types(), Length(min=1)),
            }),
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
    }

    def _run(self, job_id):
        job = self.bot.scheduler.get_job(job_id)
        if job is None:
            raise excp.NotFound("Could not find job id '%s'" % job_id)
        if job.next_run_time is None:
            job.resume()
            self.bot.scheduler.wakeup()
            self.message.reply_text("Job `%s` has"
                                    " been resumed." % job_id,
                                    threaded=True, prefixed=False)
        else:
            self.message.reply_text("Job `%s` is not paused (so it can"
                                    " not be resumed)." % job_id,
                                    threaded=True, prefixed=False)
Example No. 10
class Handler(handler.TriggeredHandler):
    """Triggers a workflow to downgrade/upgrade the version of this bot."""
    wait_jenkins_queue_item = 0.1
    config_section = 'updater'
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('upgrade', takes_args=True),
            trigger.Trigger('update', takes_args=True),
            trigger.Trigger('upgrayedd', takes_args=True),
        ],
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
        'args': {
            'order': [
                'version',
            ],
            'help': {
                'version': ('version of padre container to deploy'
                            ' (must exist in artifactory), if'
                            ' not provided then the latest will'
                            ' be found'),
            },
        }
    }
    required_clients = ('jenkins',)
    required_secrets = (
        'ci.artifactory.ro_account',
    )

    def _await_confirm(self, old_version, version, changelog_lines):
        def _show_others_active():
            active_handlers = len(self.bot.active_handlers)
            return ("There are %s other active"
                    # Remove one since that's the upgrade handler itself...
                    " handlers.") % (max(0, active_handlers - 1))
        pretext_lines = [
            "Newer version `%s` found!" % version,
            "I am older version `%s`." % old_version,
        ]
        text_lines = []
        if changelog_lines:
            text_lines.append("Last `%s` changes:" % len(changelog_lines))
            text_lines.extend(changelog_lines)
        attachments = [{
            'pretext': "\n".join(pretext_lines),
            'mrkdwn_in': ['pretext', 'text'],
            "text": "\n".join(text_lines),
        }]
        self.message.reply_attachments(
            text="Good %s." % self.date_wrangler.get_when(),
            attachments=attachments,
            link_names=True, as_user=True,
            channel=self.message.body.channel,
            log=LOG, thread_ts=self.message.body.get("ts"))
        replier = functools.partial(self.message.reply_text,
                                    threaded=True, prefixed=False,
                                    thread_ts=self.message.body.ts)
        f = followers.ConfirmMe(confirms_what='upgrading',
                                confirm_self_ok=True,
                                check_func=_show_others_active)
        replier(f.generate_who_satisifies_message(self))
        self.wait_for_transition(follower=f, wait_timeout=300,
                                 wait_start_state='CONFIRMING')
        if self.state == 'CONFIRMED_CANCELLED':
            raise excp.Cancelled

    def _run(self, **kwargs):
        replier = functools.partial(self.message.reply_text,
                                    threaded=True, prefixed=False)
        me = pkg_resources.get_distribution('padre')
        ro_account = self.bot.secrets.ci.artifactory.ro_account
        version = kwargs.get("version")
        version_provided = bool(version)
        project_url = self.bot.config.updater.project_url
        path = None
        if not version_provided:
            replier("Scanning artifactory, please wait...")
            newer_paths_it = uu.iter_updates(me.version,
                                             ro_account, project_url)
            newer_paths = sorted(newer_paths_it, key=lambda v: v.version)
            if newer_paths:
                path = newer_paths[-1].path
                version = str(newer_paths[-1].version)
        if not version:
            replier("No potentially upgradeable versions"
                    " found under '%s'" % project_url)
            return
        if me.version == version:
            replier("Nothing to upgrade, version desired is equivalent"
                    " to active version.")
            return
        if path is None:
            tmp_path = uu.check_fetch_version(version, ro_account, project_url)
            path = tmp_path.path
        self._await_confirm(
            me.version, version, uu.extract_changelog(path))
        self.change_state("UPGRADING")
        jenkins_job = self.config.jenkins_job
        jenkins_client = self.bot.clients.jenkins_client
        job = jenkins_client.get_job(jenkins_job)
        if job is not None:
            replier(
                "Triggering upgrade to"
                " version `%s` by kicking job `%s`." % (version,
                                                        jenkins_job))
            qi = job.invoke(build_params={
                'image_tag': version,
                'bot': self.bot.name or "",
            })
            replier("Your upgrade to `%s` job"
                    " has been queued." % version)
            build = None
            while build is None:
                if self.dead.is_set():
                    # Oh well, someone else killed us...
                    raise excp.Dying
                qi.poll()
                build = qi.get_build()
                if build is None:
                    self.dead.wait(self.wait_jenkins_queue_item)
            replier(
                "Your upgrade to `%s` job has"
                " started at %s. I am going into stealth/quiet"
                " mode until then (resurrection expected in %0.2f"
                " seconds), goodbye..." % (version, build.url,
                                           build.get_eta()))
            self.bot.quiescing = True
            self.bot.scheduler.shutdown(wait=False)
        else:
            raise excp.NotFound(
                "Jenkins upgrade job '%s' was not"
                " found" % jenkins_job)
Example No. 11
class CalcSizeHandler(handler.TriggeredHandler):
    """Determines size of docker artifactory repositories."""

    config_section = 'artifactory'
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('artifactory calculate size', takes_args=True),
        ],
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
        'args': {
            'order': ['project'],
            'help': {
                'project': 'project to scan',
            },
            'schema': Schema({
                Required("project"): All(su.string_types(), Length(min=1)),
            }),
        },
    }
    required_secrets = (
        'ci.artifactory.ro_account',
    )
    required_configurations = ('base_url',)

    def _run(self, project):
        ro_account = self.bot.secrets.ci.artifactory.ro_account

        path = _find_path(self.config, project, ro_account)
        if not path:
            raise excp.NotFound("Could not find project '%s'" % project)

        replier = self.message.reply_text
        replier = functools.partial(replier, threaded=True, prefixed=False)
        replier("Determining current size of `%s`, please"
                " wait..." % project)

        all_sizes = [
            path.stat().size,
        ]
        child_paths = list(path.iterdir())
        child_paths = sorted(child_paths, key=lambda p: p.name)
        if child_paths:
            c_pbar = self.message.make_progress_bar(
                len(child_paths), update_period=_calc_emit_every(child_paths))
            for child_path in c_pbar.wrap_iter(child_paths):
                if self.dead.is_set():
                    break
                all_sizes.append(child_path.stat().size)
                replier("Determining total size"
                        " of top-level child `%s`, please"
                        " wait..." % child_path.name)
                sub_child_paths = list(child_path.iterdir())
                if sub_child_paths:
                    sc_pbar = self.message.make_progress_bar(
                        len(sub_child_paths),
                        update_period=_calc_emit_every(sub_child_paths))
                    for sub_child_path in sc_pbar.wrap_iter(sub_child_paths):
                        if self.dead.is_set():
                            break
                        try:
                            sub_child_size = _calc_docker_size(
                                sub_child_path, sub_child_path.stat().size)
                        except excp.NotFound:
                            sub_child_size = 0
                            for size in _iter_sizes_deep(sub_child_path):
                                if self.dead.is_set():
                                    break
                                sub_child_size += size
                        all_sizes.append(sub_child_size)

        if self.dead.is_set():
            replier("Died during scanning, please"
                    " try again next time...")
        else:
            replier(
                "Size of `%s` is %s" % (project,
                                        utils.format_bytes(
                                            sum(all_sizes), quote=True)))
Example No. 12
class PruneHandler(handler.TriggeredHandler):
    """Prunes a docker artifactory repositories."""

    config_section = 'artifactory'
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('artifactory prune', takes_args=True),
        ],
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
        'args': {
            'order': ['project', 'target_size'],
            'help': {
                'project': 'project to scan',
                'target_size': 'target size to prune project repo to',
            },
            'schema': Schema({
                Required("project"): All(su.string_types(), Length(min=1)),
                Required("target_size"): All(int, Range(min=0)),
            }),
            'converters': {
                'target_size': functools.partial(strutils.string_to_bytes,
                                                 return_int=True,
                                                 # Because artifactory
                                                 # is using the SI
                                                 # system... arg...
                                                 unit_system='SI'),
            },
        },
    }
    required_secrets = (
        'ci.artifactory.ro_account',
        'ci.artifactory.push_account',
    )
    required_configurations = ('base_url',)

    def _do_prune(self, prune_what):
        dirs_pruned = 0
        files_pruned = 0
        was_finished = True
        pbar = self.message.make_progress_bar(
            len(prune_what), update_period=_calc_emit_every(prune_what))
        for child in pbar.wrap_iter(prune_what):
            if self.dead.is_set():
                was_finished = False
                break
            stack = collections.deque()
            stack.append((child.path, False))
            while stack:
                # NOTE: we intentionally do not check dead.is_set() here, so
                # that we never delete a sub-child half-way through (which
                # could leave docker images partially working, ie missing
                # components/layers... which would be bad).
                p, p_visited = stack.pop()
                p_is_dir = p.is_dir()
                if p_is_dir and not p_visited:
                    stack.append((p, True))
                    stack.extend((c_p, False) for c_p in p.iterdir())
                elif p_is_dir and p_visited:
                    p.rmdir()
                    dirs_pruned += 1
                else:
                    p.unlink()
                    files_pruned += 1
        return (dirs_pruned, files_pruned, was_finished)

    def _do_scan(self, replier, path, target_size):
        root_child_paths = list(path.iterdir())
        all_sub_children = []
        replier("Finding all sub-children of"
                " %s top-level children." % len(root_child_paths))

        if root_child_paths:
            pbar = self.message.make_progress_bar(
                len(root_child_paths),
                update_period=_calc_emit_every(root_child_paths))
            for child_path in pbar.wrap_iter(root_child_paths):
                if self.dead.is_set():
                    raise excp.Dying
                replier("Scanning top-level"
                        " child `%s`, please wait..." % child_path.name)
                sub_child_paths = list(child_path.iterdir())
                if sub_child_paths:
                    rc_pbar = self.message.make_progress_bar(
                        len(sub_child_paths),
                        update_period=_calc_emit_every(sub_child_paths))
                    for sub_child_path in rc_pbar.wrap_iter(sub_child_paths):
                        if self.dead.is_set():
                            raise excp.Dying
                        all_sub_children.append(munch.Munch({
                            'path': sub_child_path,
                            'frozen': _is_frozen(sub_child_path),
                            'ctime': sub_child_path.stat().ctime,
                            'size': sub_child_path.stat().size,
                            'parent': child_path,
                        }))

        all_sub_children = sorted(all_sub_children, key=lambda p: p.ctime)
        num_childs_frozen = sum(int(sc.frozen) for sc in all_sub_children)
        replier("Determining total sizes"
                " of %s sub-children"
                " (%s are frozen)." % (len(all_sub_children),
                                       num_childs_frozen))
        if all_sub_children:
            pbar = self.message.make_progress_bar(
                len(all_sub_children),
                update_period=_calc_emit_every(all_sub_children))
            for sub_child in pbar.wrap_iter(all_sub_children):
                if self.dead.is_set():
                    raise excp.Dying
                try:
                    total_size = _calc_docker_size(sub_child.path,
                                                   sub_child.size)
                except excp.NotFound:
                    total_size = 0
                    for size in _iter_sizes_deep(sub_child.path):
                        if self.dead.is_set():
                            raise excp.Dying
                        total_size += size
                sub_child.total_size = total_size

        accum_size = 0
        prune_what = []
        for sub_child in reversed(all_sub_children):
            if sub_child.frozen:
                continue
            accum_size += sub_child.total_size
            if accum_size >= target_size:
                prune_what.append(sub_child)
        prune_what.reverse()
        return prune_what

    def _format_child(self, child):
        try:
            child_pretext = "%s/%s" % (child.parent.name, child.path.name)
        except AttributeError:
            child_pretext = "%s" % child.path.name
        attachment = {
            'pretext': child_pretext,
            'mrkdwn_in': [],
            'footer': "Artifactory",
            'footer_icon': ART_ICON,
        }
        tot_size = utils.format_bytes(child.total_size)
        attachment['fields'] = [
            {
                'title': 'Size',
                'value': tot_size,
                'short': utils.is_short(tot_size),
            },
            {
                "title": "Created on",
                "value": _format_dt(child.ctime),
                "short": True,
            },
        ]
        return attachment

    def _run(self, project, target_size):
        push_account = self.bot.secrets.ci.artifactory.push_account
        path = _find_path(self.config, project, push_account)
        if not path:
            raise excp.NotFound("Could not find project '%s'" % project)
        replier = functools.partial(self.message.reply_text,
                                    threaded=True, prefixed=False)
        replier("Scanning `%s`, please wait..." % project)
        try:
            prune_what = self._do_scan(replier, path, target_size)
        except excp.Dying:
            replier("Died during scanning, please try"
                    " again next time...")
            return
        if not prune_what:
            replier("Nothing to prune found.")
            return
        self.message.reply_attachments(
            attachments=list(self._format_child(c) for c in prune_what),
            log=LOG, link_names=True, as_user=True,
            thread_ts=self.message.body.ts,
            channel=self.message.body.channel)
        replier("Please confirm the pruning of"
                " %s paths." % len(prune_what))
        f = followers.ConfirmMe(confirms_what='pruning')
        replier(f.generate_who_satisifies_message(self))
        self.wait_for_transition(wait_timeout=300, follower=f,
                                 wait_start_state='CONFIRMING')
        if self.state != 'CONFIRMED_CANCELLED':
            self.change_state("PRUNING")
            replier("Initiating prune of %s paths." % len(prune_what))
            dirs_pruned, files_pruned, done = self._do_prune(prune_what)
            replier("Pruned %s directories and"
                    " %s files." % (dirs_pruned, files_pruned))
            if not done:
                replier("This was a partial prune, since I died"
                        " during pruning, please try"
                        " again next time...")
        else:
            replier("Pruning cancelled.")
Example No. 13
class ListServersOnHypervisor(Searcher, handler.TriggeredHandler):
    """Lists virtual machines on a hypervisor."""

    requires_topo_loader = True
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('openstack hypervisor list-vms', takes_args=True),
        ],
        'args': {
            'order': [
                'hypervisor',
                'only_private',
                'cloud',
            ],
            'help': {
                'hypervisor':
                'hypervisor to list vms on',
                'only_private': ('only search the private clouds'
                                 ' and skip the public clouds'),
                'cloud': ("filter to only specific cloud (empty"
                          " searches all clouds)"),
            },
            'converters': {
                'only_private': hu.strict_bool_from_string,
            },
            'schema':
            Schema({
                Required("hypervisor"):
                All(su.string_types(), Length(min=1)),
                Required("only_private"):
                bool,
                Required("cloud"):
                su.string_types(),
            }),
            'defaults': {
                'only_private': True,
                'cloud': '',
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud_viewers'),
    }

    def _run(self, hypervisor, only_private=True, cloud=''):
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        servers, searched_clouds, _found_clouds = self._search(
            hypervisor, {'host': hypervisor},
            target_search=True,
            only_private=only_private,
            cloud=cloud,
            replier=replier)
        if servers:
            self._emit_servers(servers)
            replier("Found %s possible matches, hopefully one of"
                    " them was what you were looking for..." % len(servers))
        else:
            replier("Sorry I could not find `%s` in %s clouds,"
                    " try another?" % (hypervisor, searched_clouds))
Example No. 14
class ConsoleHandler(handler.TriggeredHandler):
    """Gets a jenkins jobs console log."""

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jenkins console', takes_args=True),
        ],
        'args': {
            'order': [
                'job_name',
                'build',
                'lines',
            ],
            'schema':
            Schema({
                Required("job_name"): All(scu.string_types(), Length(min=1)),
                Required("build"): int,
                Required("lines"): int,
            }),
            'converters': {
                'build': int,
                'lines': int,
            },
            'defaults': {
                'lines': CONSOLE_LINES,
            },
            'help': {
                'job_name':
                "job name to fetch",
                "build":
                "build identifier to fetch",
                "lines": ("maximum number of lines from the"
                          " console to respond"
                          " with (negative for no limit)"),
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
    }
    required_clients = ('jenkins', )

    def _run(self, job_name, build, lines):
        replier = self.message.reply_text
        replier = functools.partial(replier, threaded=True, prefixed=False)
        replier("Fetching job `%s` build `%s`"
                " console, please wait..." % (job_name, build))
        clients = self.bot.clients
        job = clients.jenkins_client.get_job(job_name)
        if job is None:
            replier("Job `%s` was not found!" % job_name)
            return
        build_num = build
        build = job.get_build(build_num)
        if build is not None:
            console_out = build.get_console()
            console_out = _format_build_console(console_out, line_limit=lines)
            replier(console_out)
        else:
            replier("Job `%s` build `%s` was"
                    " not found!" % (job_name, build_num))
Example No. 15
def _build_handler_from_jenkins(jenkins_client,
                                job_name,
                                restricted_ldap_groups=None,
                                description=None,
                                cmd_suffix='',
                                cmd_prefix=''):
    job = jenkins_client.get_job(job_name)
    if job is None:
        return None, None, None
    handles_what = {
        'args': {},
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'followers': [
            ConsoleFollower,
            AbortFollower,
        ],
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
    }
    cleaned_job_name = job_name.replace("-", " ").replace("_", " ")

    trigger_text = cleaned_job_name.lower()
    if cmd_suffix:
        trigger_text += " " + cmd_suffix
    if cmd_prefix:
        trigger_text = cmd_prefix + " " + trigger_text

    raw_param_defs = list(job.get_params())
    param_defs = collections.OrderedDict()
    for param in raw_param_defs:
        param_name = param['name']
        if param_name in param_defs:
            continue
        param_def = {}
        param_type = param['type']
        param_extra_description = ''
        if param_type in (
                'StringParameterDefinition',
                # TODO(harlowja): can we do validation?
                'ValidatingStringParameterDefinition'):
            param_def['type'] = str
        elif param_type == 'BooleanParameterDefinition':
            param_def['type'] = bool
            param_def['converter'] = hu.strict_bool_from_string
        elif param_type == 'ChoiceParameterDefinition':
            param_def['type'] = str
            choices = list(p.strip() for p in param['choices'] if p.strip())
            choices.sort()
            param_def['converter'] = functools.partial(utils.only_one_of,
                                                       choices)
            param_extra_description = "one of [%s]" % (", ".join(choices))
        else:
            raise RuntimeError("Unknown how to translate jenkins job '%s'"
                               " param '%s' type '%s' into a"
                               " python type: %s" %
                               (job_name, param_name, param_type, param))
        if 'defaultParameterValue' in param:
            param_def['default'] = param['defaultParameterValue']['value']
        if 'description' in param:
            param_description = param['description']
            if param_extra_description:
                # Do some cleanup on the existing description before
                # we mess with it (so that it formats nicer).
                param_description = param_description.strip()
                param_description = param_description.rstrip(".")
                param_description += " " + param_extra_description
            param_def['help'] = param_description
        elif param_extra_description:
            param_def['help'] = param_extra_description
        param_defs[param_name] = param_def

    args_converters = {}
    args_order = []
    args_defaults = {}
    args_help = {}
    for param_name, param_def in param_defs.items():
        args_order.append(param_name)
        if 'converter' in param_def:
            args_converters[param_name] = param_def['converter']
        if 'default' in param_def:
            args_defaults[param_name] = param_def['default']
        if 'help' in param_def:
            args_help[param_name] = param_def['help']

    handles_what['triggers'] = [
        trigger.Trigger(trigger_text, takes_args=bool(args_order)),
    ]

    handles_what['args']['help'] = args_help
    handles_what['args']['defaults'] = args_defaults
    handles_what['args']['converters'] = args_converters
    handles_what['args']['order'] = args_order

    if not description:
        description = "Initiates a %s build." % job_name

    job_cls_dct = {
        'handles_what': handles_what,
        'job_name': job_name,
        '__doc__': description,
        '__module__': __name__,
    }
    job_type_name = job_name
    job_type_name = job_type_name.replace("-", "_")
    job_type_name = job_type_name.replace(" ", "_")
    job_type_name = job_type_name.replace("\t", "_")
    job_type_name_pieces = job_type_name.split("_")
    for i in compat_range(0, len(job_type_name_pieces)):
        p = job_type_name_pieces[i]
        p = p.strip()
        if p:
            job_type_name_pieces[i] = p.title()
        else:
            job_type_name_pieces[i] = ''
    job_type_name = "%sJobHandler" % ("".join(job_type_name_pieces))
    job_type_name = str(job_type_name)
    job_cls = type(job_type_name, (JobHandler, ), job_cls_dct)
    return (job_type_name, job_cls, job_cls_dct)
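A hypothetical use of this factory (the job name below is invented; only the function's signature, trigger-text rules, and class-naming rules come from the code above): given a jenkins job, it returns a dynamically built JobHandler subclass whose trigger is derived from the job name and whose arguments mirror the job's parameter definitions.

# Hypothetical usage sketch; "deploy-cloud-thing" is a made-up job name.
name, cls, cls_dct = _build_handler_from_jenkins(
    jenkins_client, "deploy-cloud-thing", cmd_prefix="ci")
if cls is not None:
    print(name)   # -> "DeployCloudThingJobHandler"
    # The generated handler responds to the trigger text
    # "ci deploy cloud thing" and takes the job's parameters as arguments.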
Example No. 16
class UnplannedHandler(handler.TriggeredHandler):
    """Creates a unplanned issue + associates it to an active sprint."""

    # Because the client library fetches the same things over and over,
    # and the things we know to be the same aren't changing much (or ever)...
    #
    # The sizes of these were picked somewhat arbitrarily but should be fine...
    cache = munch.Munch({
        'projects': LRUCache(maxsize=100),
        'boards': LRUCache(maxsize=100),
    })
    required_clients = ('jira', )
    config_section = 'jira'
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jira unplanned', takes_args=True),
        ],
        'args': {
            'order': [
                'summary',
                'time_taken',
                'was_resolved',
                'project',
                'board',
            ],
            'converters': {
                'time_taken': _convert_time_taken,
                'was_resolved': hu.strict_bool_from_string,
            },
            'schema':
            Schema({
                Required("summary"): All(scu.string_types(), Length(min=1)),
                Required("project"): All(scu.string_types(), Length(min=1)),
                Required("board"): All(scu.string_types(), Length(min=1)),
                Required("time_taken"): int,
                Required("was_resolved"): bool,
            }),
            'help': {
                'summary':
                "short summary of the unplanned work",
                'board':
                'board whose active sprint the newly'
                ' created issue will be dropped into (must exist)',
                'time_taken': ('time taken on unplanned'
                               ' work (ie 30 seconds, 5 minutes,'
                               ' 1 hour, 1 day...)'),
                'project':
                'project to create task in (must exist)',
                'was_resolved':
                'mark the newly created issue as resolved',
            },
            'defaults': {
                'project': 'CAA',
                'board': 'CAA board',
                'time_taken': "1 hour",
                "was_resolved": True,
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
    }

    @staticmethod
    def _find_and_cache(fetcher_func, match_func, cache_target, cache_key):
        if cache_key and cache_key in cache_target:
            return cache_target[cache_key]
        offset = 0
        result = None
        found = False
        while not found:
            items = fetcher_func(start_at=offset)
            if not items:
                break
            else:
                for item in items:
                    if match_func(item):
                        result = item
                        found = True
                        break
                if not found:
                    offset = offset + len(items)
        if found and cache_key:
            cache_target[cache_key] = result
        return result

    @classmethod
    def _find_project(cls, jac, project):
        def match_func(p):
            return (p.name.lower() == project.lower()
                    or p.key.lower() == project.lower() or p.id == project)

        def fetcher_func(all_projects, start_at):
            return all_projects[start_at:]

        return cls._find_and_cache(
            functools.partial(fetcher_func, jac.projects()), match_func,
            cls.cache.projects, project)

    @classmethod
    def _find_board(cls, jac, board, type='scrum'):
        def match_func(b):
            return (b.name.lower() == board.lower() or b.id == board)

        def fetcher_func(start_at):
            return jac.boards(type=type, startAt=start_at)

        return cls._find_and_cache(fetcher_func, match_func, cls.cache.boards,
                                   ":".join([board, type]))

    @classmethod
    def _find_sprint(cls, jac, board, board_name, ok_states):
        def match_func(s):
            return s.state.lower() in ok_states

        def fetcher_func(start_at):
            return jac.sprints(board.id, startAt=start_at)

        # We don't want to cache anything, since we expect sprints to
        # actually become active/inactive quite a bit...
        return cls._find_and_cache(fetcher_func, match_func, {}, None)

    @staticmethod
    def _create_issue(jac,
                      project,
                      secs_taken,
                      summary,
                      user_name,
                      channel_name='',
                      quick_link=None):
        mins_taken = secs_taken / 60.0
        hours_taken = mins_taken / 60.0
        days_taken = hours_taken / 24.0
        time_taken_pieces = [
            "%0.2f days" % (days_taken),
            "%0.2f hours" % (hours_taken),
            "%0.2f minutes" % (mins_taken),
            "%s seconds" % (secs_taken),
        ]
        time_taken_text = " or ".join(time_taken_pieces)
        new_issue_description_lines = [
            ("User @%s spent %s doing"
             " unplanned work.") % (user_name, time_taken_text),
        ]
        if channel_name:
            new_issue_description_lines.extend([
                "",
                "In channel: #%s" % channel_name,
            ])
        if quick_link:
            new_issue_description_lines.extend([
                "",
                "Reference: %s" % quick_link,
            ])
        new_issue_fields = {
            'summary': summary,
            'issuetype': {
                'name': 'Task',
            },
            'components': [{
                'name': "Unplanned"
            }],
            'assignee': {
                'name': user_name,
            },
            'project': project.id,
            'description': "\n".join(new_issue_description_lines),
        }
        new_issue = jac.create_issue(fields=new_issue_fields)
        new_issue_link = "<%s|%s>" % (new_issue.permalink(), new_issue.key)
        return (new_issue, new_issue_link)

    def _run(self, summary, time_taken, was_resolved, project, board):
        # Load and validate stuff (before doing work...)
        jac = self.bot.clients.jira_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        # This one is used here because it appears the RTM one isn't
        # processing/sending links correctly (did it ever?); this one
        # does handle links right, so use it...
        reply_attachments = functools.partial(
            self.message.reply_attachments,
            log=LOG,
            link_names=True,
            as_user=True,
            thread_ts=self.message.body.ts,
            channel=self.message.body.channel,
            unfurl_links=False)
        j_project = self._find_project(jac, project)
        if not j_project:
            raise excp.NotFound("Unable to find project '%s'" % (project))
        j_board = self._find_board(jac, board)
        if not j_board:
            raise excp.NotFound("Unable to find board '%s'" % (board))
        # Create it in that project...
        replier("Creating unplanned issue"
                " in project `%s`, please wait..." % (project))
        new_issue, new_issue_link = self._create_issue(
            jac,
            j_project,
            time_taken,
            summary,
            self.message.body.user_name,
            channel_name=self.message.body.get('channel_name'),
            quick_link=self.message.body.get('quick_link'))
        reply_attachments(attachments=[{
            'pretext': ("Created unplanned"
                        " issue %s.") % (new_issue_link),
            'mrkdwn_in': ['pretext'],
        }])
        # Find and bind it to currently active sprint (if any)...
        j_sprint = self._find_sprint(jac, j_board, board, ['active'])
        if j_sprint:
            reply_attachments(attachments=[{
                'pretext': ("Binding %s to active sprint `%s`"
                            " of board `%s`." %
                            (new_issue_link, j_sprint.name, board)),
                'mrkdwn_in': ['pretext'],
            }])
            jac.add_issues_to_sprint(j_sprint.id, [new_issue.key])
            reply_attachments(attachments=[{
                'pretext': ("Bound %s to active sprint `%s`"
                            " of board `%s`." %
                            (new_issue_link, j_sprint.name, board)),
                'mrkdwn_in': ['pretext'],
            }])
        else:
            replier("No active sprint found"
                    " in board `%s`, sprint binding skipped." % (board))
        # Mark it as done...
        if was_resolved:
            transition = None
            possible_transitions = set()
            for t in jac.transitions(new_issue.id):
                t_name = t.get('name', '')
                t_name = t_name.lower()
                if t_name in _RESOLVED_TRANSITIONS:
                    transition = t
                if t_name:
                    possible_transitions.add(t_name)
            if not transition:
                possible_transitions = sorted(possible_transitions)
                possible_transitions = " or ".join(
                    ["`%s`" % t.upper() for t in possible_transitions])
                ok_transitions = sorted(_RESOLVED_TRANSITIONS)
                ok_transitions = " or ".join(
                    ["`%s`" % t.upper() for t in ok_transitions])
                reply_attachments(attachments=[{
                    'pretext': ("Unable to resolve %s, could not find"
                                " issues %s"
                                " state transition!") %
                    (new_issue_link, ok_transitions),
                    'mrkdwn_in': ['pretext', 'text'],
                    "text": ("Allowable state"
                             " transitions: %s" % possible_transitions),
                }])
            else:
                reply_attachments(attachments=[{
                    'pretext': ("Transitioning %s issue to resolved, "
                                "please wait...") % (new_issue_link),
                    'mrkdwn_in': ['pretext'],
                }])
                jac.transition_issue(new_issue.id,
                                     transition['id'],
                                     comment="All done! kthxbye")
                replier("Transitioned.")
        replier = self.message.reply_text
        replier("Thanks for tracking your unplanned work!",
                prefixed=True,
                threaded=True)
Example No. 17
class NotifyOwnersOfServersOnHypervisor(Searcher, handler.TriggeredHandler):
    """Notify some owners of VMs on a hypervisor about something."""

    requires_topo_loader = True
    required_clients = ('ecm', )
    confirms_action = 'notification'
    confirms_what = 'something'
    template_subdir = 'maintenance'
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('openstack hypervisor notify-vm-owners',
                            takes_args=True),
        ],
        'args': {
            'order': [
                'hypervisor',
                'template',
                'what',
                'description',
                'when',
                'only_private',
                'cloud',
                # Various ecm passthroughs...
                'test_mode',
                'notify_slack',
                'notify_email',
            ],
            'help': {
                'hypervisor':
                'hypervisor to find vms on',
                'template':
                "notification template to use",
                'what':
                'one word for what is about to happen',
                'when': ("when the event is going to happen"
                         " in iso8601 format (if not"
                         " provided then the current time is used)"),
                'description':
                'multiple words for what is about to happen',
                'only_private': ('only search the private clouds'
                                 ' and skip the public clouds'),
                'cloud': ("filter to only specific cloud (empty"
                          " searches all clouds)"),
                # Various ecm passthroughs...
                'test_mode':
                'ecm notification api test mode passthrough',
                'notify_slack':
                'send notification via slack',
                'notify_email':
                'send notification via email',
            },
            # This will be filled in during setup_class call (since it
            # needs semi-dynamic information from the bot configuration).
            'converters': {},
            'schema':
            Schema({
                Required("hypervisor"):
                All(su.string_types(), Length(min=1)),
                Required("only_private"):
                bool,
                Required("cloud"):
                su.string_types(),
                Required("what"):
                All(su.string_types(), Length(min=1)),
                Required("description"):
                All(su.string_types(), Length(min=1)),
                Required("template"):
                All(su.string_types(), Length(min=1)),
                Required("when"):
                Any(None, datetime.datetime),
                # Various ecm passthroughs...
                Required("test_mode"):
                bool,
                Required("notify_email"):
                bool,
                Required("notify_slack"):
                bool,
            }),
            'defaults': {
                'only_private': True,
                'cloud': '',
                'when': None,
                # Various ecm passthroughs...
                'test_mode': False,
                'notify_slack': True,
                'notify_email': True,
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
    }

    @classmethod
    def setup_class(cls, bot):
        tz = bot.config.tz
        cls.handles_what['args']['converters'].update({
            'only_private':
            hu.strict_bool_from_string,
            'when':
            functools.partial(_convert_dt, pytz.timezone(tz)),
            # Various ecm passthroughs...
            'test_mode':
            hu.strict_bool_from_string,
            'notify_slack':
            hu.strict_bool_from_string,
            'notify_email':
            hu.strict_bool_from_string,
        })

    def _build_template(self,
                        servers,
                        hypervisor,
                        template,
                        what,
                        when,
                        description,
                        test_mode=False):
        tmp_servers = []
        for s in servers:
            s_owner = None
            try:
                s_owner = s.metadata.owning_group
            except AttributeError:
                pass
            if s_owner:
                # Present a smaller view of which servers are here (for now).
                tmp_servers.append(
                    munch.Munch({
                        'id': s.id,
                        'owner': s_owner,
                        'name': s.name,
                    }))
        subject = self.render_template('hv_subject', {'what': what.title()})
        subject = subject.strip()
        body = self.render_template(
            template, {
                'hypervisor': hypervisor,
                'vms': tmp_servers,
                'what': what,
                'description': description,
                'when': when,
                'subject': subject,
                'test_mode': test_mode,
            })
        return subject, body

    def _run(self,
             hypervisor,
             template,
             what,
             description,
             when=None,
             only_private=True,
             cloud='',
             test_mode=False,
             notify_email=True,
             notify_slack=True):
        ecm = self.bot.clients.ecm_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        if when is None:
            when = self.date_wrangler.get_now()
        if not self.template_exists(template):
            replier("Template `%s` does not exist. Try again." % template)
        else:
            servers, searched_clouds, _found_clouds = self._search(
                hypervisor, {'host': hypervisor},
                target_search=True,
                only_private=only_private,
                cloud=cloud,
                replier=replier)
            if servers:
                self._emit_servers(servers)
                subject, body = self._build_template(servers,
                                                     hypervisor,
                                                     template,
                                                     what,
                                                     when,
                                                     description,
                                                     test_mode=test_mode)
                attachment = {
                    'pretext':
                    ("Found %s servers hosted on hypervisor `%s`, please"
                     " confirm that you wish to notify owners"
                     " of these servers using bundled template"
                     " `%s`." % (len(servers), hypervisor, template)),
                    'text':
                    "\n".join([
                        "_Subject:_ `%s`" % subject,
                        "_Body:_",
                        '```',
                        body,
                        '```',
                    ]),
                    'mrkdwn_in': ["text", 'pretext'],
                }
                self.message.reply_attachments(
                    attachments=[attachment],
                    log=LOG,
                    link_names=True,
                    as_user=True,
                    text=' ',
                    thread_ts=self.message.body.ts,
                    channel=self.message.body.channel,
                    unfurl_links=False)
                f = followers.ConfirmMe(confirms_what='notification')
                replier(f.generate_who_satisifies_message(self))
                self.wait_for_transition(wait_timeout=300,
                                         wait_start_state='CONFIRMING',
                                         follower=f)
                if self.state != 'CONFIRMED_CANCELLED':
                    self.change_state("SPAMMING")
                    admin_owning_group = self.config.get('admin_owning_group')
                    sent, _unknowns, targets = ecm.notify_server_owners(
                        servers,
                        subject,
                        body,
                        test_mode=test_mode,
                        notify_email=notify_email,
                        notify_slack=notify_slack,
                        admin_owning_group=admin_owning_group)
                    if sent:
                        replier("Notification spam"
                                " sent (via slack and/or email) to %s"
                                " groups." % (len(targets)))
                    else:
                        replier("Spam not sent (either no targets found"
                                " or no requested spam mechanisms"
                                " provided).")
                else:
                    replier("Notification cancelled.")
            else:
                replier("Sorry I could not find `%s` in %s clouds,"
                        " try another?" % (hypervisor, searched_clouds))
Example No. 18
class DescribeServerHandler(Searcher, handler.TriggeredHandler):
    """Finds a virtual machine and describes it.

    This is on purpose eerily similar to the OSC `server show` command.
    """

    requires_topo_loader = True
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('openstack server show', takes_args=True),
        ],
        'args': {
            'order': [
                'server',
                'only_private',
                'cloud',
            ],
            'help': {
                'server':
                'server (name or id)',
                'only_private': ('only search the private clouds'
                                 ' and skip the public clouds'),
                'cloud': ("filter to only specific cloud (empty"
                          " searches all clouds)"),
            },
            'converters': {
                'only_private': hu.strict_bool_from_string,
            },
            'schema':
            Schema({
                Required("server"): All(su.string_types(), Length(min=1)),
                Required("only_private"): bool,
                Required("cloud"): su.string_types(),
            }),
            'defaults': {
                'only_private': True,
                'cloud': '',
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud_viewers'),
    }

    def _run(self, server, only_private=True, cloud=''):
        # Search should be unique across clouds
        target_search = True
        if uuidutils.is_uuid_like(server):
            # Find by UUID
            filters = {'uuid': server}
        elif netutils.is_valid_ip(server):
            # Find by IP address
            # Note: much more expensive; only done when the value is exactly an IP.
            filters = {'ip': _force_exact(server)}
        else:
            # Find by name (across all clouds)
            filters = {'name': _force_exact(server)}
            target_search = False  # Name could exist in multiple clouds
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        servers, searched_clouds, _found_clouds = self._search(
            server,
            filters,
            target_search=target_search,
            only_private=only_private,
            cloud=cloud,
            replier=replier)
        if servers:
            self._emit_servers(servers)
            replier("Found %s possible matches, hopefully one of"
                    " them was what you were looking for..." % len(servers))
        else:
            replier("Sorry I could not find `%s` in %s clouds,"
                    " try another?" % (server, searched_clouds))