Esempio n. 1
0
class AddHandler(handler.TriggeredHandler):
    """Alias a long command to a short(er) one (for the calling user)."""

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('alias add', True),
        ],
        'schema': Schema({
            Required("long"): All(su.string_types(), Length(min=1)),
            Required("short"): All(su.string_types(), Length(min=1)),
        }),
        'args': {
            'order': [
                'long',
                'short',
            ],
            'help': {
                'long': "full command",
                'short': 'shorter alias of full command',
            },
        },
    }

    def _run(self, long, short):
        """Persist a short -> long alias for the calling user."""
        user_id = self.message.body.user_id
        if not user_id:
            # Aliases are stored per-user; with no user id there is
            # nowhere to record this alias.
            return
        brain_key = "user:%s" % user_id
        with self.bot.locks.brain:
            try:
                user_info = self.bot.brain[brain_key]
            except KeyError:
                user_info = {}
            user_info.setdefault('aliases', {})[short] = long
            self.bot.brain[brain_key] = user_info
            self.bot.brain.sync()
        self.message.reply_text(
            "Alias of `%s` to `%s` has been recorded." % (short, long),
            threaded=True, prefixed=False)
Esempio n. 2
0
class Handler(handler.TriggeredHandler):
    """Finds a hostnames ip address."""

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('dns lookup', takes_args=True),
        ],
        'args': {
            'order': ['hostname'],
            'help': {
                'hostname': 'hostname to lookup',
            },
            'schema':
            Schema({
                Required("hostname"): All(su.string_types(), Length(min=1)),
            }),
        },
    }

    def _run(self, hostname):
        """Resolve ``hostname`` via DNS and reply with its ip address.

        Resolution failures (typos, unknown hosts) are an expected
        user-facing outcome, so they are reported in the reply instead
        of being raised.
        """
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        try:
            hostname_ip = socket.gethostbyname(hostname)
        except socket.gaierror as e:
            replier("Could not resolve `%s`: %s" % (hostname, e))
        else:
            replier("The ip address for `%s` is"
                    " `%s`" % (hostname, hostname_ip))
Esempio n. 3
0
class Handler(handler.TriggeredHandler):
    """Emit some message to some set of slack channels."""

    required_clients = ('slack',)
    requires_slack_sender = True
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('emit message', takes_args=True),
        ],
        'args': {
            'order': ['channels', 'message'],
            'schema': Schema({
                Required("channels"): All(scu.string_types(), Length(min=1)),
                Required("message"): All(scu.string_types(), Length(min=1)),
            }),
            'help': {
                'channels': ('comma separated list of channels'
                             ' to broadcast to'),
                'message': 'what to broadcast',
            },
        },
    }

    def _run(self, channels, message):
        """Send ``message`` to every (deduplicated) requested channel."""
        sender = self.bot.slack_sender
        server = self.bot.clients.slack_client.server
        targets = []
        seen = set()
        for raw_name in channels.split(","):
            name = raw_name.strip()
            if not name or name in seen:
                continue
            found = server.channels.find(name)
            if found is None:
                raise RuntimeError("Could not find channel '%s'" % name)
            if found.id in seen:
                # Same channel referenced under two different names.
                continue
            seen.add(name)
            seen.add(found.id)
            targets.append(found)
        for target in targets:
            sender.rtm_send(message, channel=target.id)
Esempio n. 4
0
class RemoveHandler(handler.TriggeredHandler):
    """Remove a alias to a long command (for the calling user)."""

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('alias remove', True),
        ],
        'schema': Schema({
            Required("short"): All(su.string_types(), Length(min=1)),
        }),
        'args': {
            'order': [
                'short',
            ],
            'help': {
                'short': 'alias of full command to remove',
            },
        },
    }

    def _run(self, short):
        """Drop the calling user's alias named ``short`` (if any)."""
        user_id = self.message.body.user_id
        if not user_id:
            # Aliases are stored per-user; nothing to remove without one.
            return
        brain_key = "user:%s" % user_id
        with self.bot.locks.brain:
            try:
                user_info = self.bot.brain[brain_key]
            except KeyError:
                user_info = {}
            aliases = user_info.get('aliases', {})
            try:
                long_cmd = aliases.pop(short)
            except KeyError:
                lines = [
                    "No alias found for `%s`" % short,
                ]
            else:
                self.bot.brain[brain_key] = user_info
                self.bot.brain.sync()
                lines = [
                    ("Alias of `%s` to `%s` has"
                     " been removed.") % (short, long_cmd),
                ]
        self.message.reply_text("\n".join(lines),
                                threaded=True, prefixed=False)
Esempio n. 5
0
class DescribeUserHandler(handler.TriggeredHandler):
    """Lists the details of some ldap user."""

    required_clients = ("ldap", )
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('ldap describe user', takes_args=True),
        ],
        'args': {
            'order': [
                'user',
            ],
            'help': {
                'user': '******',
            },
            'schema':
            Schema({
                Required("user"): All(scu.string_types(), Length(min=1)),
            }),
        },
    }

    def _run(self, user):
        """Reply with a one-row table of the ldap user's attributes."""
        ldap_client = self.bot.clients.ldap_client
        details = ldap_client.describe_user(user)
        replier = self.message.reply_text
        if not details:
            replier("No user with name `%s` found." % (user),
                    threaded=True,
                    prefixed=False)
            return
        headers = []
        values = []
        for key in sorted(details.keys()):
            val = details.get(key)
            if val is None:
                # Skip unset attributes entirely.
                continue
            # Turn attribute names into title-ish column headers.
            header = key.replace("_", ' ')
            headers.append(header[0].upper() + header[1:])
            values.append(val)
        table = tabulate.tabulate([values], headers=headers)
        replier("\n".join(["```", table, "```"]),
                threaded=True, prefixed=False)
Esempio n. 6
0
class WatchHandler(JobWatcher):
    """Watches a jenkins jobs build."""

    # Delay (seconds) between build info polls.
    build_info_delay = 10

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'followers': [
            ConsoleFollower,
            AbortFollower,
        ],
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jenkins watch', takes_args=True),
        ],
        'args': {
            'order': [
                'job_name',
                'build',
            ],
            'schema':
            Schema({
                Required("job_name"): All(scu.string_types(), Length(min=1)),
                Required("build"): int,
            }),
            'converters': {
                'build': int,
            },
            'help': {
                'job_name': "job name to watch",
                "build": "build number to watch",
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
    }
    required_clients = ('jenkins', )

    def _run(self, job_name, build):
        """Begin watching the given jenkins job build."""
        jenkins_client = self.bot.clients.jenkins_client
        return self._watch(job_name, build, jenkins_client)
Esempio n. 7
0
class RunOneHandler(handler.TriggeredHandler):
    """Explicitly runs one periodic jobs."""

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('periodics run one', takes_args=True),
        ],
        'args': {
            'order': [
                'job_id',
            ],
            'help': {
                'job_id': 'job id to run',
            },
            'schema': Schema({
                Required("job_id"): All(scu.string_types(), Length(min=1)),
            }),
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
    }

    def _run(self, job_id):
        """Force the scheduler to run the given job as soon as possible."""
        job = self.bot.scheduler.get_job(job_id)
        if job is None:
            raise excp.NotFound("Could not find job id '%s'" % job_id)
        if job.next_run_time is None:
            # A paused job has no next run time; it must be resumed first.
            raise RuntimeError("Paused job '%s' can not be explicitly"
                               " ran (please resume it first)" % job_id)
        job.modify(next_run_time=self.date_wrangler.get_now())
        self.bot.scheduler.wakeup()
        self.message.reply_text("Job `%s` has had"
                                " its next run time"
                                " updated to be now (hopefully it"
                                " runs soon)." % job_id,
                                threaded=True, prefixed=False)
Esempio n. 8
0
class ResumeHandler(handler.TriggeredHandler):
    """Resumes a previously paused periodic job."""

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('periodics resume', takes_args=True),
        ],
        'args': {
            'order': [
                'job_id',
            ],
            'help': {
                'job_id': 'job id to resume',
            },
            'schema': Schema({
                Required("job_id"): All(scu.string_types(), Length(min=1)),
            }),
        },
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
    }

    def _run(self, job_id):
        """Resume the given job if (and only if) it is currently paused."""
        job = self.bot.scheduler.get_job(job_id)
        if job is None:
            raise excp.NotFound("Could not find job id '%s'" % job_id)
        if job.next_run_time is not None:
            # A non-paused job has a next run time; nothing to resume.
            self.message.reply_text("Job `%s` is not paused (so it can"
                                    " not be resumed)." % job_id,
                                    threaded=True, prefixed=False)
            return
        job.resume()
        self.bot.scheduler.wakeup()
        self.message.reply_text("Job `%s` has"
                                " been resumed." % job_id,
                                threaded=True, prefixed=False)
Esempio n. 9
0
class JenkinsJobHealthHandler(handler.TriggeredHandler):
    """Provides job health for the jenkins the bot is connected to."""

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jenkins job-health', takes_args=True),
        ],
        'args': {
            'order': [
                'job_name',
            ],
            'help': {
                'job_name': 'job name to fetch (blank for all)',
            },
            'schema': Schema({
                Required("job_name"): scu.string_types(),
            }),
            'defaults': {
                'job_name': '',
            },
        },
    }
    required_clients = ('jenkins', )
    # Sub-job name(s) to look up underneath each folder job.
    folder_jobs = ['master']

    @classmethod
    def _iter_jobs(cls, jenkins_client, jobs, folders):
        """Yield ``(job, display_name)`` pairs, sorted by display name.

        Plain jobs are yielded as-is; for each folder only the
        well-known sub-job name(s) in ``folder_jobs`` are fetched (and
        yielded when they exist).
        """
        all_jobs = []
        for job in jobs:
            all_jobs.append((job, job.name))
        # These folders are typically our multibranch jobs, and we
        # care about how some job(s) under that are working out; so
        # find those job(s) if we can...
        for f in folders:
            for f_job_name in cls.folder_jobs:
                f_job_full_name = f.name + "/job/" + f_job_name
                f_job_short_name = f.name + "/" + f_job_name
                f_job = jenkins_client.get_job(f_job_full_name)
                if f_job is not None:
                    all_jobs.append((f_job, f_job_short_name))
        # Sort case-insensitively by the display name.
        for job, job_name in sorted(all_jobs, key=lambda v: v[1].lower()):
            yield job, job_name

    @classmethod
    def insert_periodics(cls, bot, scheduler):
        """Register a periodic job that posts a health report to slack.

        Does nothing unless ``jenkins.health_report_period`` is set in
        the bot config and a slack client + sender are available.
        """
        try:
            health_report_period = bot.config.jenkins.health_report_period
        except AttributeError:
            # No period configured; periodic health reporting disabled.
            pass
        else:
            slack_client = bot.clients.get("slack_client")
            slack_sender = bot.slack_sender
            if slack_client is not None and slack_sender is not None:
                hr = peu.make_periodic_runner("jenkins health report",
                                              cls,
                                              health_report_period,
                                              channel=bot.config.admin_channel,
                                              log=LOG)
                hr.__module__ = __name__
                hr.__name__ = "run_check_jenkins_health"
                # NOTE: the period doubles as a crontab expression here.
                hr_trigger = cron.CronTrigger.from_crontab(
                    health_report_period, timezone=bot.config.tz)
                hr_name = reflection.get_callable_name(hr)
                hr_description = "Periodically analyzes jenkins job health."
                scheduler.add_job(
                    hr,
                    trigger=hr_trigger,
                    jobstore='memory',
                    name="\n".join([hr_name, hr_description]),
                    # Stable id derived from the period + name + description.
                    id=utils.hash_pieces(
                        [health_report_period, hr_name, hr_description],
                        max_len=8),
                    args=(bot, slack_client, slack_sender),
                    coalesce=True)

    def _run(self, job_name=''):
        """Build and reply (as an attachment) a job health report.

        When ``job_name`` is blank every (buildable) job is included,
        otherwise only the matching job is reported on.
        """
        jenkins_client = self.bot.clients.jenkins_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        replier_attachments = self.message.reply_attachments
        replier("Calculating jenkins job health, please wait...")
        jobs = []
        folders = []
        # Split the top-level listing into plain jobs and folders.
        for thing in jenkins_client.iter_jobs(yield_folders=True,
                                              expand_folders=False):
            if isinstance(thing, tj.JobFolder):
                folders.append(thing)
            else:
                jobs.append(thing)
        job_lines = []
        job_colors = collections.defaultdict(int)
        for job, a_job_name in self._iter_jobs(jenkins_client, jobs, folders):
            if job_name and a_job_name != job_name:
                continue
            job_color = job.color
            # A '_anime' suffix means the job is currently building;
            # strip it so we just see the underlying状态 color.
            if job_color.endswith("_anime"):
                job_color = job_color[:-len('_anime')]
            if job_color in ('notbuilt', 'disabled'):
                continue
            if job_color in ('green', 'blue', 'red', 'yellow'):
                # Slack doesn't seem to have a blue ball, so just
                # switch it...
                if job_color == 'blue':
                    job_color = 'green'
                pretty_job_color = ":%sball:" % job_color
            else:
                pretty_job_color = job_color
            job_colors[job_color] += 1
            job_lines.append(u"• <%s|%s> %s" %
                             (job.url, a_job_name, pretty_job_color))
        num_red = job_colors.get('red', 0)
        num_yellow = job_colors.get('yellow', 0)
        num_ok = job_colors.get("green", 0)
        attachment = {
            'pretext': 'Report for `%s`' % jenkins_client.base_url,
            'text': "\n".join(job_lines),
            'mrkdwn_in': ['text', 'pretext'],
        }
        # Attachment color reflects the worst state seen: red wins,
        # then yellow, then green (no color at all when nothing ran).
        if num_red:
            attachment['color'] = su.COLORS['red']
        if num_yellow and not num_red:
            attachment['color'] = su.COLORS['yellow']
        if not num_yellow and not num_red and num_ok:
            attachment['color'] = su.COLORS['green']
        replier_attachments(attachments=[attachment],
                            log=LOG,
                            link_names=True,
                            as_user=True,
                            text=' ',
                            thread_ts=self.message.body.ts,
                            channel=self.message.body.channel,
                            unfurl_links=True)
Esempio n. 10
0
class ListServersOnHypervisor(Searcher, handler.TriggeredHandler):
    """Lists virtual machines on a hypervisor."""

    requires_topo_loader = True
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('openstack hypervisor list-vms', takes_args=True),
        ],
        'args': {
            'order': [
                'hypervisor',
                'only_private',
                'cloud',
            ],
            'help': {
                'hypervisor':
                'hypervisor to list vms on',
                'only_private': ('only search the private clouds'
                                 ' and skip the public clouds'),
                'cloud': ("filter to only specific cloud (empty"
                          " searches all clouds)"),
            },
            'converters': {
                'only_private': hu.strict_bool_from_string,
            },
            'schema':
            Schema({
                Required("hypervisor"):
                All(su.string_types(), Length(min=1)),
                Required("only_private"):
                bool,
                Required("cloud"):
                su.string_types(),
            }),
            'defaults': {
                'only_private': True,
                'cloud': '',
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud_viewers'),
    }

    def _run(self, hypervisor, only_private=True, cloud=''):
        """Search the clouds for vms on ``hypervisor`` and emit matches."""
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        servers, searched_clouds, _found_clouds = self._search(
            hypervisor, {'host': hypervisor},
            target_search=True,
            only_private=only_private,
            cloud=cloud,
            replier=replier)
        if not servers:
            replier("Sorry I could not find `%s` in %s clouds,"
                    " try another?" % (hypervisor, searched_clouds))
            return
        self._emit_servers(servers)
        replier("Found %s possible matches, hopefully one of"
                " them was what you were looking for..." % len(servers))
Esempio n. 11
0
class NotifyOwnersOfServersOnHypervisor(Searcher, handler.TriggeredHandler):
    """Notify some owners of VMs on a hypervisor about something."""

    requires_topo_loader = True
    required_clients = ('ecm', )
    confirms_action = 'notification'
    confirms_what = 'something'
    template_subdir = 'maintenance'
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('openstack hypervisor notify-vm-owners',
                            takes_args=True),
        ],
        'args': {
            'order': [
                'hypervisor',
                'template',
                'what',
                'description',
                'when',
                'only_private',
                'cloud',
                # Various ecm passthroughs...
                'test_mode',
                'notify_slack',
                'notify_email',
            ],
            'help': {
                'hypervisor':
                'hypervisor to find vms on',
                'template':
                "notification template to use",
                'what':
                'one word for what is about to happen',
                'when': ("when the event is going to happen"
                         " in iso8601 format (if not"
                         " provided then the current time is used)"),
                'description':
                'multiple words for what is about to happen',
                'only_private': ('only search the private clouds'
                                 ' and skip the public clouds'),
                'cloud': ("filter to only specific cloud (empty"
                          " searches all clouds)"),
                # Various ecm passthroughs...
                'test_mode':
                'ecm notification api test mode passthrough',
                'notify_slack':
                'send notification via slack',
                'notify_email':
                'send notification via email',
            },
            # This will be filled in during setup_class call (since it
            # needs semi-dynamic information from the bot configuration).
            'converters': {},
            'schema':
            Schema({
                Required("hypervisor"):
                All(su.string_types(), Length(min=1)),
                Required("only_private"):
                bool,
                Required("cloud"):
                su.string_types(),
                Required("what"):
                All(su.string_types(), Length(min=1)),
                Required("description"):
                All(su.string_types(), Length(min=1)),
                Required("template"):
                All(su.string_types(), Length(min=1)),
                Required("when"):
                Any(None, datetime.datetime),
                # Various ecm passthroughs...
                Required("test_mode"):
                bool,
                Required("notify_email"):
                bool,
                Required("notify_slack"):
                bool,
            }),
            'defaults': {
                'only_private': True,
                'cloud': '',
                'when': None,
                # Various ecm passthroughs...
                'test_mode': False,
                'notify_slack': True,
                'notify_email': True,
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
    }

    @classmethod
    def setup_class(cls, bot):
        """Fill in the argument converters that need the bot's config.

        The ``when`` converter needs the configured timezone, which is
        only known once a bot instance (and its config) exists.
        """
        tz = bot.config.tz
        cls.handles_what['args']['converters'].update({
            'only_private':
            hu.strict_bool_from_string,
            'when':
            functools.partial(_convert_dt, pytz.timezone(tz)),
            # Various ecm passthroughs...
            'test_mode':
            hu.strict_bool_from_string,
            'notify_slack':
            hu.strict_bool_from_string,
            'notify_email':
            hu.strict_bool_from_string,
        })

    def _build_template(self,
                        servers,
                        hypervisor,
                        template,
                        what,
                        when,
                        description,
                        test_mode=False):
        """Render the notification subject and body for the given servers.

        Only servers that carry an ``owning_group`` in their metadata
        are included in the rendered body.

        Returns a ``(subject, body)`` tuple of rendered strings.
        """
        tmp_servers = []
        for s in servers:
            s_owner = None
            try:
                s_owner = s.metadata.owning_group
            except AttributeError:
                # Server has no (known) owner metadata; skip it.
                pass
            if s_owner:
                # Present a smaller view of which servers are here (for now).
                tmp_servers.append(
                    munch.Munch({
                        'id': s.id,
                        'owner': s_owner,
                        'name': s.name,
                    }))
        subject = self.render_template('hv_subject', {'what': what.title()})
        subject = subject.strip()
        body = self.render_template(
            template, {
                'hypervisor': hypervisor,
                'vms': tmp_servers,
                'what': what,
                'description': description,
                'when': when,
                'subject': subject,
                'test_mode': test_mode,
            })
        return subject, body

    def _run(self,
             hypervisor,
             template,
             what,
             description,
             when=None,
             only_private=True,
             cloud='',
             test_mode=False,
             notify_email=True,
             notify_slack=True):
        """Find vms on a hypervisor, preview, confirm, then notify owners.

        Flow: search the clouds for servers on ``hypervisor``, render
        the notification from ``template``, show a preview attachment,
        wait (up to 5 minutes) for a human confirmation, and only then
        hand the notification off to ecm.
        """
        ecm = self.bot.clients.ecm_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        if when is None:
            when = self.date_wrangler.get_now()
        if not self.template_exists(template):
            replier("Template `%s` does not exist. Try again." % template)
        else:
            servers, searched_clouds, _found_clouds = self._search(
                hypervisor, {'host': hypervisor},
                target_search=True,
                only_private=only_private,
                cloud=cloud,
                replier=replier)
            if servers:
                self._emit_servers(servers)
                subject, body = self._build_template(servers,
                                                     hypervisor,
                                                     template,
                                                     what,
                                                     when,
                                                     description,
                                                     test_mode=test_mode)
                # Show the rendered notification so the confirmer can
                # see exactly what would be sent.
                attachment = {
                    'pretext':
                    ("Found %s servers hosted on hypervisor `%s`, please"
                     " confirm that you wish to notify owners"
                     " of these servers using bundled template"
                     " `%s`." % (len(servers), hypervisor, template)),
                    'text':
                    "\n".join([
                        "_Subject:_ `%s`" % subject,
                        "_Body:_",
                        '```',
                        body,
                        '```',
                    ]),
                    'mrkdwn_in': ["text", 'pretext'],
                }
                self.message.reply_attachments(
                    attachments=[attachment],
                    log=LOG,
                    link_names=True,
                    as_user=True,
                    text=' ',
                    thread_ts=self.message.body.ts,
                    channel=self.message.body.channel,
                    unfurl_links=False)
                # Block until someone confirms (or cancels/times out).
                f = followers.ConfirmMe(confirms_what='notification')
                replier(f.generate_who_satisifies_message(self))
                self.wait_for_transition(wait_timeout=300,
                                         wait_start_state='CONFIRMING',
                                         follower=f)
                if self.state != 'CONFIRMED_CANCELLED':
                    self.change_state("SPAMMING")
                    admin_owning_group = self.config.get('admin_owning_group')
                    sent, _unknowns, targets = ecm.notify_server_owners(
                        servers,
                        subject,
                        body,
                        test_mode=test_mode,
                        notify_email=notify_email,
                        notify_slack=notify_slack,
                        admin_owning_group=admin_owning_group)
                    if sent:
                        replier("Notification spam"
                                " sent (via slack and/or email) to %s"
                                " groups." % (len(targets)))
                    else:
                        replier("Spam not sent (either no targets found"
                                " or no requested spam mechanisms"
                                " provided).")
                else:
                    replier("Notification cancelled.")
            else:
                replier("Sorry I could not find `%s` in %s clouds,"
                        " try another?" % (hypervisor, searched_clouds))
Esempio n. 12
0
class Handler(handler.TriggeredHandler):
    """Get stock information."""

    stock_url = 'https://www.alphavantage.co/query'

    # NOTE: If more than 100 symbols are included, the API will
    # return quotes for the first 100 symbols.
    #
    # In order to fix that just split into 100 size chunks...
    max_per_call = 100

    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('stock', takes_args=True),
        ],
        'args': {
            'order': ['symbols'],
            'converters': {},
            'schema': Schema({
                Required("symbols"): All(scu.string_types(), Length(min=1)),
            }),
            'help': {
                'symbols': 'symbol(s) to lookup (comma separated)',
            },
            'defaults': {
                'symbols': 'gddy',
            },
        },
    }

    def _run(self, **kwargs):
        """Fetch quote(s) for the requested symbol(s) and reply a table."""
        symbols = kwargs.get('symbols', "")
        requested = [s.strip() for s in symbols.split(",") if s.strip()]
        # Uppercase + deduplicate while preserving the requested order.
        seen_symbols = set()
        uniq_symbols = []
        for s in requested:
            tmp_s = s.upper()
            if tmp_s not in seen_symbols:
                uniq_symbols.append(tmp_s)
                seen_symbols.add(tmp_s)
        headers = ["Symbol", "Price", "Volume"]
        rows = []
        for batch in utils.iter_chunks(uniq_symbols, self.max_per_call):
            # Let requests build/encode the query string itself; the
            # previously used ``urllib.urlencode`` only exists on
            # python 2 (it moved to ``urllib.parse`` in python 3).
            resp = requests.get(self.stock_url, params={
                'function': 'BATCH_STOCK_QUOTES',
                'symbols': ",".join(batch),
                'datatype': 'csv',
                'apikey': self.config.stock.apikey,
            })
            resp.raise_for_status()
            for row in csv.DictReader(
                    six.StringIO(resp.content.decode('utf-8'))):
                rows.append([
                    row['symbol'],
                    row['price'],
                    row['volume'],
                ])
        lines = [
            "```",
            tabulate.tabulate(rows, headers=headers),
            "```",
        ]
        replier = self.message.reply_text
        replier("\n".join(lines), threaded=True, prefixed=False)
Esempio n. 13
0
class ListHandler(handler.TriggeredHandler):
    """Lists the members of a ldap group."""

    required_clients = ("ldap", "github")
    # Listings with more members than this are uploaded as a (private)
    # gist instead of being pasted directly into the channel (so that
    # large groups do not flood the channel).
    max_before_gist = 100
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('ldap list', takes_args=True),
        ],
        'args': {
            'order': [
                'group',
            ],
            'help': {
                'group': 'ldap group to list',
            },
            'schema':
            Schema({
                Required("group"): All(scu.string_types(), Length(min=1)),
            }),
        },
    }

    def _run(self, group):
        """Fetches the given ldap groups members and replies with them.

        Small listings are replied to inline (as a fenced table); larger
        listings are uploaded to a private gist and the gist url is
        replied with instead.
        """
        replier = self.message.reply_text
        ldap_client = self.bot.clients.ldap_client
        group_members = [
            ldap_utils.explode_member(member)
            for member in ldap_client.list_ldap_group(group)
        ]
        group_members = sorted(group_members, key=lambda m: m.get("CN"))
        tbl_headers = ['CN', 'DC', 'OU']
        rows = []
        for member in group_members:
            row = []
            for k in tbl_headers:
                v = member.get(k)
                # Multi-valued ldap attributes come back as lists; show
                # them as a single comma separated cell.
                if isinstance(v, list):
                    v = ", ".join(v)
                row.append(v)
            rows.append(row)
        if len(group_members) <= self.max_before_gist:
            lines = [
                "```",
                tabulate.tabulate(rows, headers=tbl_headers),
                "```",
            ]
            replier("\n".join(lines), threaded=True, prefixed=False)
        else:
            github_client = self.bot.clients.github_client
            me = github_client.get_user()
            to_send = {}
            upload_what = [
                ('listing', tabulate.tabulate(rows, headers=tbl_headers)),
            ]
            for what_name, contents in upload_what:
                # Github has upper limit on postings to 1MB
                contents = _chop(contents, units.Mi)
                contents = contents.strip()
                name = what_name + ".txt"
                to_send[name] = github.InputFileContent(contents)
            if to_send:
                try:
                    gist = me.create_gist(True, to_send)
                except Exception:
                    LOG.warning(
                        "Failed uploading gist for listing"
                        " of '%s' ldap group", group)
                    # Previously this failure was only logged, leaving
                    # the calling user with no response at all; tell
                    # them something went wrong instead.
                    replier("Sorry, failed uploading listing of '%s'"
                            " ldap group, please try"
                            " again later." % group,
                            threaded=True, prefixed=False)
                else:
                    lines = [
                        "Gist url at: %s" % gist.html_url,
                    ]
                    replier("\n".join(lines), threaded=True, prefixed=False)
Esempio n. 14
0
class DescribeServerHandler(Searcher, handler.TriggeredHandler):
    """Finds a virtual machine and describes it.

    This is on purpose eerily similar to OSC `server show` command.
    """

    requires_topo_loader = True
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('openstack server show', takes_args=True),
        ],
        'args': {
            'order': [
                'server',
                'only_private',
                'cloud',
            ],
            'help': {
                'server':
                'server (name or id)',
                'only_private': ('only search the private clouds'
                                 ' and skip the public clouds'),
                'cloud': ("filter to only specific cloud (empty"
                          " searches all clouds)"),
            },
            'converters': {
                'only_private': hu.strict_bool_from_string,
            },
            'schema':
            Schema({
                Required("server"): All(su.string_types(), Length(min=1)),
                Required("only_private"): bool,
                Required("cloud"): su.string_types(),
            }),
            'defaults': {
                'only_private': True,
                'cloud': '',
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud_viewers'),
    }

    def _run(self, server, only_private=True, cloud=''):
        """Looks up the given server and emits any matches found."""
        if uuidutils.is_uuid_like(server):
            # Uuids are expected to be unique across all clouds, so the
            # search can stop at the first hit.
            filters = {'uuid': server}
            target_search = True
        elif netutils.is_valid_ip(server):
            # Ip searches are much more expensive, so only done when the
            # input really looks like an ip address (also expected to be
            # unique across clouds).
            filters = {'ip': _force_exact(server)}
            target_search = True
        else:
            # The same name may exist in multiple clouds, so a name
            # search can not stop at the first match.
            filters = {'name': _force_exact(server)}
            target_search = False
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        servers, searched_clouds, _found_clouds = self._search(
            server, filters,
            target_search=target_search,
            only_private=only_private,
            cloud=cloud,
            replier=replier)
        if not servers:
            replier("Sorry I could not find `%s` in %s clouds,"
                    " try another?" % (server, searched_clouds))
        else:
            self._emit_servers(servers)
            replier("Found %s possible matches, hopefully one of"
                    " them was what you were looking for..." % len(servers))
Esempio n. 15
0
class CalcSizeHandler(handler.TriggeredHandler):
    """Determines size of docker artifactory repositories."""

    # Section (under the bot configuration) this handler reads from.
    config_section = 'artifactory'
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('artifactory calculate size', takes_args=True),
        ],
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
        'args': {
            'order': ['project'],
            'help': {
                'project': 'project to scan',
            },
            'schema': Schema({
                Required("project"): All(su.string_types(), Length(min=1)),
            }),
        },
    }
    required_secrets = (
        'ci.artifactory.ro_account',
    )
    required_configurations = ('base_url',)

    def _run(self, project):
        """Walks the projects repo (two levels deep) summing sizes.

        Progress is periodically reported back to the calling user and
        the walk stops early (without reporting a final size) if the bot
        starts dying/shutting down mid-scan.
        """
        # Read-only credentials suffice here; nothing is modified.
        ro_account = self.bot.secrets.ci.artifactory.ro_account

        path = _find_path(self.config, project, ro_account)
        if not path:
            raise excp.NotFound("Could not find project '%s'" % project)

        replier = self.message.reply_text
        replier = functools.partial(replier, threaded=True, prefixed=False)
        replier("Determining current size of `%s`, please"
                " wait..." % project)

        # NOTE(review): stat().size of the root path appears to be its
        # own (non-recursive) size, with children accumulated separately
        # below -- confirm against the artifactory path api.
        all_sizes = [
            path.stat().size,
        ]
        child_paths = list(path.iterdir())
        child_paths = sorted(child_paths, key=lambda p: p.name)
        if child_paths:
            c_pbar = self.message.make_progress_bar(
                len(child_paths), update_period=_calc_emit_every(child_paths))
            for child_path in c_pbar.wrap_iter(child_paths):
                # Abort early if the bot is dying/shutting down.
                if self.dead.is_set():
                    break
                all_sizes.append(child_path.stat().size)
                replier("Determining total size"
                        " of top-level child `%s`, please"
                        " wait..." % child_path.name)
                sub_child_paths = list(child_path.iterdir())
                if sub_child_paths:
                    sc_pbar = self.message.make_progress_bar(
                        len(sub_child_paths),
                        update_period=_calc_emit_every(sub_child_paths))
                    for sub_child_path in sc_pbar.wrap_iter(sub_child_paths):
                        if self.dead.is_set():
                            break
                        try:
                            # _calc_docker_size raises NotFound when the
                            # path is not docker-like (handled below by
                            # falling back to a slower deep walk).
                            sub_child_size = _calc_docker_size(
                                sub_child_path, sub_child_path.stat().size)
                        except excp.NotFound:
                            sub_child_size = 0
                            for size in _iter_sizes_deep(sub_child_path):
                                if self.dead.is_set():
                                    break
                                sub_child_size += size
                        all_sizes.append(sub_child_size)

        if self.dead.is_set():
            replier("Died during scanning, please"
                    " try again next time...")
        else:
            replier(
                "Size of `%s` is %s" % (project,
                                        utils.format_bytes(
                                            sum(all_sizes), quote=True)))
Esempio n. 16
0
class PruneHandler(handler.TriggeredHandler):
    """Prunes a docker artifactory repositories."""

    # Section (under the bot configuration) this handler reads from.
    config_section = 'artifactory'
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('artifactory prune', takes_args=True),
        ],
        'authorizer': auth.user_in_ldap_groups('admins_cloud'),
        'args': {
            'order': ['project', 'target_size'],
            'help': {
                'project': 'project to scan',
                'target_size': 'target size to prune project repo to',
            },
            'schema': Schema({
                Required("project"): All(su.string_types(), Length(min=1)),
                Required("target_size"): All(int, Range(min=0)),
            }),
            'converters': {
                'target_size': functools.partial(strutils.string_to_bytes,
                                                 return_int=True,
                                                 # Because artifactory
                                                 # is using the SI
                                                 # system... arg...
                                                 unit_system='SI'),
            },
        },
    }
    required_secrets = (
        'ci.artifactory.ro_account',
        'ci.artifactory.push_account',
    )
    required_configurations = ('base_url',)

    def _do_prune(self, prune_what):
        """Deletes the previously selected (scanned) sub-children.

        Returns a ``(dirs_pruned, files_pruned, was_finished)`` tuple;
        ``was_finished`` is false when the bot started dying and the
        prune stopped before processing every selected child.
        """
        dirs_pruned = 0
        files_pruned = 0
        was_finished = True
        pbar = self.message.make_progress_bar(
            len(prune_what), update_period=_calc_emit_every(prune_what))
        for child in pbar.wrap_iter(prune_what):
            if self.dead.is_set():
                was_finished = False
                break
            # Iterative post-order walk: a directory is removed only
            # after everything underneath it has been removed.
            stack = collections.deque()
            stack.append((child.path, False))
            while stack:
                # NOTE: we do not check dead.is_set() here which might
                # be ok, but is done so that we don't delete a sub child
                # half-way (which if done may leave any docker images
                # half-way-working... ie missing components/layers...
                # which would be bad).
                p, p_visited = stack.pop()
                p_is_dir = p.is_dir()
                if p_is_dir and not p_visited:
                    stack.append((p, True))
                    stack.extend((c_p, False) for c_p in p.iterdir())
                elif p_is_dir and p_visited:
                    p.rmdir()
                    dirs_pruned += 1
                else:
                    p.unlink()
                    files_pruned += 1
        return (dirs_pruned, files_pruned, was_finished)

    def _do_scan(self, replier, path, target_size):
        """Scans the repo and selects which sub-children to prune.

        Walks two levels below ``path`` sizing every sub-child, then
        (working from newest to oldest by creation time and skipping
        frozen ones) keeps sub-children until their cumulative size
        reaches ``target_size``; everything past that point is returned
        (oldest first) as the prune candidates.

        Raises ``excp.Dying`` if the bot starts dying mid-scan.
        """
        root_child_paths = list(path.iterdir())
        all_sub_children = []
        replier("Finding all sub-children of"
                " %s top-level children." % len(root_child_paths))

        if root_child_paths:
            pbar = self.message.make_progress_bar(
                len(root_child_paths),
                update_period=_calc_emit_every(root_child_paths))
            for child_path in pbar.wrap_iter(root_child_paths):
                if self.dead.is_set():
                    raise excp.Dying
                replier("Scanning top-level"
                        " child `%s`, please wait..." % child_path.name)
                sub_child_paths = list(child_path.iterdir())
                if sub_child_paths:
                    rc_pbar = self.message.make_progress_bar(
                        len(sub_child_paths),
                        update_period=_calc_emit_every(sub_child_paths))
                    for sub_child_path in rc_pbar.wrap_iter(sub_child_paths):
                        if self.dead.is_set():
                            raise excp.Dying
                        all_sub_children.append(munch.Munch({
                            'path': sub_child_path,
                            # Frozen children are never prune candidates.
                            'frozen': _is_frozen(sub_child_path),
                            'ctime': sub_child_path.stat().ctime,
                            'size': sub_child_path.stat().size,
                            'parent': child_path,
                        }))

        # Oldest first (by creation time).
        all_sub_children = sorted(all_sub_children, key=lambda p: p.ctime)
        num_childs_frozen = sum(int(sc.frozen) for sc in all_sub_children)
        replier("Determining total sizes"
                " of %s sub-children"
                " (%s are frozen)." % (len(all_sub_children),
                                       num_childs_frozen))
        if all_sub_children:
            pbar = self.message.make_progress_bar(
                len(all_sub_children),
                update_period=_calc_emit_every(all_sub_children))
            for sub_child in pbar.wrap_iter(all_sub_children):
                if self.dead.is_set():
                    raise excp.Dying
                try:
                    # _calc_docker_size raises NotFound when the path is
                    # not docker-like (handled by a slower deep walk).
                    total_size = _calc_docker_size(sub_child.path,
                                                   sub_child.size)
                except excp.NotFound:
                    total_size = 0
                    for size in _iter_sizes_deep(sub_child.path):
                        if self.dead.is_set():
                            raise excp.Dying
                        total_size += size
                sub_child.total_size = total_size

        # Walk newest -> oldest accumulating size; once the accumulated
        # size passes the target everything older gets pruned.
        accum_size = 0
        prune_what = []
        for sub_child in reversed(all_sub_children):
            if sub_child.frozen:
                continue
            accum_size += sub_child.total_size
            if accum_size >= target_size:
                prune_what.append(sub_child)
        # Restore oldest-first ordering for the caller.
        prune_what.reverse()
        return prune_what

    def _format_child(self, child):
        """Turns a prune candidate into a slack attachment for display."""
        try:
            child_pretext = "%s/%s" % (child.parent.name, child.path.name)
        except AttributeError:
            # No parent recorded; fall back to just the childs own name.
            child_pretext = "%s" % child.path.name
        attachment = {
            'pretext': child_pretext,
            'mrkdwn_in': [],
            'footer': "Artifactory",
            'footer_icon': ART_ICON,
        }
        tot_size = utils.format_bytes(child.total_size)
        attachment['fields'] = [
            {
                'title': 'Size',
                'value': tot_size,
                'short': utils.is_short(tot_size),
            },
            {
                "title": "Created on",
                "value": _format_dt(child.ctime),
                "short": True,
            },
        ]
        return attachment

    def _run(self, project, target_size):
        """Scans, confirms (with the caller) and then prunes a project."""
        # Pruning deletes things, so the writable push account is needed
        # (vs the read-only account that scanning-only handlers use).
        push_account = self.bot.secrets.ci.artifactory.push_account
        path = _find_path(self.config, project, push_account)
        if not path:
            raise excp.NotFound("Could not find project '%s'" % project)
        replier = functools.partial(self.message.reply_text,
                                    threaded=True, prefixed=False)
        replier("Scanning `%s`, please wait..." % project)
        try:
            prune_what = self._do_scan(replier, path, target_size)
        except excp.Dying:
            replier("Died during scanning, please try"
                    " again next time...")
            return
        if not prune_what:
            replier("Nothing to prune found.")
            return
        # Show the candidates and require an explicit confirmation
        # (with a timeout) before anything destructive happens.
        self.message.reply_attachments(
            attachments=list(self._format_child(c) for c in prune_what),
            log=LOG, link_names=True, as_user=True,
            thread_ts=self.message.body.ts,
            channel=self.message.body.channel)
        replier("Please confirm the pruning of"
                " %s paths." % len(prune_what))
        f = followers.ConfirmMe(confirms_what='pruning')
        replier(f.generate_who_satisifies_message(self))
        self.wait_for_transition(wait_timeout=300, follower=f,
                                 wait_start_state='CONFIRMING')
        if self.state != 'CONFIRMED_CANCELLED':
            self.change_state("PRUNING")
            replier("Initiating prune of %s paths." % len(prune_what))
            dirs_pruned, files_pruned, done = self._do_prune(prune_what)
            replier("Pruned %s directories and"
                    " %s files." % (dirs_pruned, files_pruned))
            if not done:
                replier("This was a partial prune, since I died"
                        " during pruning, please try"
                        " again next time...")
        else:
            replier("Pruning cancelled.")
Esempio n. 17
0
class Handler(handler.TriggeredHandler):
    """Searches various elastic indexes for things in (log)message fields."""

    # (index pattern, query template) pairs that get scanned in order.
    index_and_query = [
        ('dcr.compute_*-*', 'message:"%(thing)s"'),
        ('dcr.openstack_logstash-*', 'logmessage:"%(thing)s"'),
    ]

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('elastic search logs', takes_args=True),
        ],
        'authorizer':
        auth.message_from_channels(['openstack', 'team-openstack-eng']),
        'args': {
            'order': ['thing'],
            'help': {
                'thing': 'thing to find logs for',
            },
            'schema':
            Schema({
                Required("thing"): All(su.string_types(), Length(min=1)),
            }),
        },
    }
    required_clients = (
        'github',
        'elastic',
    )

    @staticmethod
    def _chop(fh, max_am):
        """Reads back (up to) the last ``max_am`` of ``fh``s contents.

        When anything was left unread a short prefix noting how much
        more there was is prepended to the returned contents.
        """
        left, contents = utils.read_backwards_up_to(fh, max_am)
        if not left:
            return contents
        return "%s more..." % left + " " + contents

    def _run(self, thing):
        """Scans each configured index for ``thing`` and gists any hits."""
        github_client = self.bot.clients.github_client
        elastic_client = self.bot.clients.elastic_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        replier("Initiating scan for `%s`." % thing)
        to_send = {}
        for index, query_tpl in self.index_and_query:
            query = query_tpl % {'thing': thing}
            replier("Scanning index `%s` using query `%s`." % (index, query))
            search = (e_dsl.Search(using=elastic_client)
                      .query("query_string", query=query)
                      .sort("-@timestamp")
                      .index(index))
            buf = six.StringIO()
            for hit_num, hit in enumerate(search.scan(), 1):
                header = "Hit %s" % hit_num
                buf.write(header + "\n")
                buf.write("-" * len(header))
                buf.write("\n")
                buf.write(_format_hit(hit))
                buf.write("\n")
            # Github has upper limit on postings to 1MB
            contents = self._chop(buf, units.Mi)
            if contents:
                # Strip characters github does not accept in gist names.
                gist_name = re.sub(r"\.|\-|\*|_", "", index) + ".txt"
                to_send[gist_name] = ghe.InputFileContent(contents)
        if not to_send:
            replier("No scan results found.")
        else:
            replier("Uploading %s scan results to gist." % len(to_send))
            me = github_client.get_user()
            gist = me.create_gist(True, to_send)
            replier("Gist url at: %s" % gist.html_url)
Esempio n. 18
0
class UnplannedHandler(handler.TriggeredHandler):
    """Creates a unplanned issue + associates it to an active sprint."""

    # Because the client library fetches things over and over
    # and things we know to be the same, aren't changing a lot/ever...
    #
    # Size of these was picked somewhat arbitrarily but should be fine...
    cache = munch.Munch({
        'projects': LRUCache(maxsize=100),
        'boards': LRUCache(maxsize=100),
    })
    required_clients = ('jira', )
    config_section = 'jira'
    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jira unplanned', takes_args=True),
        ],
        'args': {
            'order': [
                'summary',
                'time_taken',
                'was_resolved',
                'project',
                'board',
            ],
            'converters': {
                'time_taken': _convert_time_taken,
                'was_resolved': hu.strict_bool_from_string,
            },
            'schema':
            Schema({
                Required("summary"): All(scu.string_types(), Length(min=1)),
                Required("project"): All(scu.string_types(), Length(min=1)),
                Required("board"): All(scu.string_types(), Length(min=1)),
                Required("time_taken"): int,
                Required("was_resolved"): bool,
            }),
            'help': {
                'summary':
                "short summary of the unplanned work",
                'board':
                'board to locate sprint to'
                ' drop newly created issue in (must exist)',
                'time_taken': ('time taken on unplanned'
                               ' work (ie 30 seconds, 5 minutes,'
                               ' 1 hour, 1 day...)'),
                'project':
                'project to create task in (must exist)',
                'was_resolved':
                'mark the newly created issue as resolved',
            },
            'defaults': {
                'project': 'CAA',
                'board': 'CAA board',
                'time_taken': "1 hour",
                "was_resolved": True,
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
    }

    @staticmethod
    def _find_and_cache(fetcher_func, match_func, cache_target, cache_key):
        """Pages through ``fetcher_func`` until ``match_func`` matches.

        The first matching item (if any) is stored in ``cache_target``
        under ``cache_key`` (when a key is provided) so repeated lookups
        avoid refetching; returns the match or ``None``.
        """
        if cache_key and cache_key in cache_target:
            return cache_target[cache_key]
        offset = 0
        result = None
        found = False
        while not found:
            items = fetcher_func(start_at=offset)
            if not items:
                break
            else:
                for item in items:
                    if match_func(item):
                        result = item
                        found = True
                        break
                if not found:
                    # Advance by exactly the number of items seen; the
                    # previous `+ 1` here skipped one item per page
                    # (start_at offsets are zero based).
                    offset = offset + len(items)
        if found and cache_key:
            cache_target[cache_key] = result
        return result

    @classmethod
    def _find_project(cls, jac, project):
        """Finds a jira project by name, key or id (cached)."""
        def match_func(p):
            return (p.name.lower() == project.lower()
                    or p.key.lower() == project.lower() or p.id == project)

        def fetcher_func(all_projects, start_at):
            return all_projects[start_at:]

        return cls._find_and_cache(
            functools.partial(fetcher_func, jac.projects()), match_func,
            cls.cache.projects, project)

    @classmethod
    def _find_board(cls, jac, board, type='scrum'):
        """Finds a jira board by name or id (cached per board+type)."""
        def match_func(b):
            return (b.name.lower() == board.lower() or b.id == board)

        def fetcher_func(start_at):
            return jac.boards(type=type, startAt=start_at)

        return cls._find_and_cache(fetcher_func, match_func, cls.cache.boards,
                                   ":".join([board, type]))

    @classmethod
    def _find_sprint(cls, jac, board, board_name, ok_states):
        """Finds the first sprint of ``board`` whose state is acceptable."""
        def match_func(s):
            return s.state.lower() in ok_states

        def fetcher_func(start_at):
            return jac.sprints(board.id, startAt=start_at)

        # We don't want to cache anything, since we expect sprints to
        # actually become active/inactive quite a bit...
        return cls._find_and_cache(fetcher_func, match_func, {}, None)

    @staticmethod
    def _create_issue(jac,
                      project,
                      secs_taken,
                      summary,
                      user_name,
                      channel_name='',
                      quick_link=None):
        """Creates the unplanned-work jira issue; returns (issue, link)."""
        # Render the elapsed time in several units so the description is
        # readable regardless of magnitude.
        mins_taken = secs_taken / 60.0
        hours_taken = mins_taken / 60.0
        days_taken = hours_taken / 24.0
        time_taken_pieces = [
            "%0.2f days" % (days_taken),
            "%0.2f hours" % (hours_taken),
            "%0.2f minutes" % (mins_taken),
            "%s seconds" % (secs_taken),
        ]
        time_taken_text = " or ".join(time_taken_pieces)
        new_issue_description_lines = [
            ("User @%s spent %s doing"
             " unplanned work.") % (user_name, time_taken_text),
        ]
        if channel_name:
            new_issue_description_lines.extend([
                "",
                "In channel: #%s" % channel_name,
            ])
        if quick_link:
            new_issue_description_lines.extend([
                "",
                "Reference: %s" % quick_link,
            ])
        new_issue_fields = {
            'summary': summary,
            'issuetype': {
                'name': 'Task',
            },
            'components': [{
                'name': "Unplanned"
            }],
            'assignee': {
                'name': user_name,
            },
            'project': project.id,
            'description': "\n".join(new_issue_description_lines),
        }
        new_issue = jac.create_issue(fields=new_issue_fields)
        new_issue_link = "<%s|%s>" % (new_issue.permalink(), new_issue.key)
        return (new_issue, new_issue_link)

    def _run(self, summary, time_taken, was_resolved, project, board):
        """Creates, sprint-binds and (optionally) resolves the issue."""
        # Load and validate stuff (before doing work...)
        jac = self.bot.clients.jira_client
        replier = functools.partial(self.message.reply_text,
                                    threaded=True,
                                    prefixed=False)
        # This one is used here because it appears that the RTM one isn't
        # processing/sending links correctly (did it ever, but this one
        # does handle links right, so ya...)
        reply_attachments = functools.partial(
            self.message.reply_attachments,
            log=LOG,
            link_names=True,
            as_user=True,
            thread_ts=self.message.body.ts,
            channel=self.message.body.channel,
            unfurl_links=False)
        j_project = self._find_project(jac, project)
        if not j_project:
            raise excp.NotFound("Unable to find project '%s'" % (project))
        j_board = self._find_board(jac, board)
        if not j_board:
            raise excp.NotFound("Unable to find board '%s'" % (board))
        # Create it in that project...
        replier("Creating unplanned issue"
                " in project `%s`, please wait..." % (project))
        new_issue, new_issue_link = self._create_issue(
            jac,
            j_project,
            time_taken,
            summary,
            self.message.body.user_name,
            channel_name=self.message.body.get('channel_name'),
            quick_link=self.message.body.get('quick_link'))
        reply_attachments(attachments=[{
            'pretext': ("Created unplanned"
                        " issue %s.") % (new_issue_link),
            'mrkdwn_in': ['pretext'],
        }])
        # Find and bind it to currently active sprint (if any)...
        j_sprint = self._find_sprint(jac, j_board, board, ['active'])
        if j_sprint:
            reply_attachments(attachments=[{
                'pretext': ("Binding %s to active sprint `%s`"
                            " of board `%s`." %
                            (new_issue_link, j_sprint.name, board)),
                'mrkdwn_in': ['pretext'],
            }])
            jac.add_issues_to_sprint(j_sprint.id, [new_issue.key])
            reply_attachments(attachments=[{
                'pretext': ("Bound %s to active sprint `%s`"
                            " of board `%s`." %
                            (new_issue_link, j_sprint.name, board)),
                'mrkdwn_in': ['pretext'],
            }])
        else:
            replier("No active sprint found"
                    " in board `%s`, sprint binding skipped." % (board))
        # Mark it as done...
        if was_resolved:
            transition = None
            possible_transitions = set()
            for t in jac.transitions(new_issue.id):
                t_name = t.get('name', '')
                t_name = t_name.lower()
                if t_name in _RESOLVED_TRANSITIONS:
                    transition = t
                if t_name:
                    possible_transitions.add(t_name)
            if not transition:
                possible_transitions = sorted(possible_transitions)
                possible_transitions = " or ".join(
                    ["`%s`" % t.upper() for t in possible_transitions])
                ok_transitions = sorted(_RESOLVED_TRANSITIONS)
                ok_transitions = " or ".join(
                    ["`%s`" % t.upper() for t in ok_transitions])
                reply_attachments(attachments=[{
                    'pretext': ("Unable to resolve %s, could not find"
                                " issues %s"
                                " state transition!") %
                    (new_issue_link, ok_transitions),
                    'mrkdwn_in': ['pretext', 'text'],
                    "text": ("Allowable state"
                             " transitions: %s" % possible_transitions),
                }])
            else:
                reply_attachments(attachments=[{
                    'pretext': ("Transitioning %s issue to resolved, "
                                "please wait...") % (new_issue_link),
                    'mrkdwn_in': ['pretext'],
                }])
                jac.transition_issue(new_issue.id,
                                     transition['id'],
                                     comment="All done! kthxbye")
                replier("Transitioned.")
        replier = self.message.reply_text
        replier("Thanks for tracking your unplanned work!",
                prefixed=True,
                threaded=True)
Esempio n. 19
0
class ConsoleHandler(handler.TriggeredHandler):
    """Fetches (a portion of) a jenkins build's console log."""

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('jenkins console', takes_args=True),
        ],
        'args': {
            'order': [
                'job_name',
                'build',
                'lines',
            ],
            'schema': Schema({
                Required("job_name"): All(scu.string_types(), Length(min=1)),
                Required("build"): int,
                Required("lines"): int,
            }),
            'converters': {
                'build': int,
                'lines': int,
            },
            'defaults': {
                'lines': CONSOLE_LINES,
            },
            'help': {
                'job_name':
                "job name to fetch",
                "build":
                "build identifier to fetch",
                "lines": ("maximum number of lines from the"
                          " console to respond"
                          " with (negative for no limit)"),
            },
        },
        'authorizer':
        auth.user_in_ldap_groups('admins_cloud'),
    }
    required_clients = ('jenkins', )

    def _run(self, job_name, build, lines):
        # All replies go into the thread, without the usual prefix.
        send = functools.partial(self.message.reply_text,
                                 threaded=True, prefixed=False)
        send("Fetching job `%s` build `%s`"
             " console, please wait..." % (job_name, build))
        job = self.bot.clients.jenkins_client.get_job(job_name)
        if job is None:
            send("Job `%s` was not found!" % job_name)
            return
        target_build = job.get_build(build)
        if target_build is None:
            send("Job `%s` build `%s` was"
                 " not found!" % (job_name, build))
            return
        # Trim the (possibly huge) console output before replying.
        console_out = _format_build_console(target_build.get_console(),
                                            line_limit=lines)
        send(console_out)
Esempio n. 20
0
class SyncHandler(handler.TriggeredHandler):
    """Syncs an upstream git repository into a downstream one.

    Optionally validates a patch repository against the freshly synced
    source (via ``update-patches``) and, when that run produced a new
    commit, pushes it to a branch and opens a pull request for it.
    """

    handles_what = {
        'message_matcher':
        matchers.match_or(matchers.match_slack("message"),
                          matchers.match_telnet("message")),
        'channel_matcher':
        matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('sync repo', True),
        ],
        'args': {
            'order': [
                'downstream_url',
                'upstream_url',
                'upstream_branch_refs',
                'upstream_tag_refs',
                'upstream_tags_as_branches_refs',
                'patch_repo_url',
                'patch_branch',
            ],
            'defaults': {
                'downstream_url': None,
                'upstream_url': None,
                'upstream_branch_refs': "master",
                'upstream_tag_refs': "",
                'upstream_tags_as_branches_refs': "",
                'patch_repo_url': "",
                'patch_branch': "master",
            },
            'converters': {},
            'schema':
            Schema({
                Required("downstream_url"):
                All(scu.string_types(), Length(min=1)),
                Required("upstream_url"):
                All(scu.string_types(), Length(min=1)),
                Optional("upstream_branch_refs"):
                All(scu.string_types(), Length(min=1)),
                Optional("upstream_tag_refs"):
                scu.string_types(),
                Optional("upstream_tags_as_branches_refs"):
                scu.string_types(),
                Optional("patch_repo_url"):
                scu.string_types(),
                Optional("patch_branch"):
                All(scu.string_types(), Length(min=1)),
            }),
            'help': {
                'downstream_url':
                "Which downstream git url to sync into?",
                'upstream_url':
                "Which upstream git url to sync from?",
                'upstream_branch_refs':
                "Which upstream branches to sync into downstream?",
                'upstream_tag_refs':
                "Which upstream tags to sync into downstream?",
                'upstream_tags_as_branches_refs':
                "Which upstream tags to sync into downstream as branches?",
                # These two were previously declared but undocumented.
                'patch_repo_url':
                "Which git url (if any) holds patches to validate?",
                'patch_branch':
                "Which patch repository branch to validate against?",
            },
        },
    }
    required_clients = ('github', )
    periodic_config_path = "github.periodics"

    @staticmethod
    def _split_csv(value):
        """Split a comma separated string into stripped non-empty pieces."""
        return [piece.strip() for piece in value.split(",") if piece.strip()]

    @staticmethod
    def _format_voluptuous_error(data,
                                 validation_error,
                                 max_sub_error_length=500):
        """Turn a voluptuous validation error into a readable message.

        :param data: the data that failed validation (currently unused,
                     kept for interface compatibility).
        :param validation_error: a voluptuous ``Invalid`` (or
                                 ``MultipleInvalid``) instance.
        :param max_sub_error_length: kept for interface compatibility.
        :returns: a one-line ``str`` naming the missing/invalid field(s).
        """
        errors = []
        if isinstance(validation_error, MultipleInvalid):
            errors.extend(
                sorted(sub_error.path[0]
                       for sub_error in validation_error.errors))
        else:
            errors.append(validation_error.path[0])

        errors = ['`{}`'.format(e) for e in errors]
        if len(errors) == 1:
            adj = ''
            names = errors[0]
            verb = 'is'
        elif len(errors) == 2:
            adj = 'Both of '
            names = ' and '.join(errors)
            verb = 'are'
        else:
            adj = 'All of '
            # Oxford-comma join, e.g. "`a`, `b`, and `c`". The previous
            # implementation built this via a bytearray with a *bytes*
            # separator over str items, which raises TypeError on
            # python 3; plain str manipulation works on both 2 and 3.
            names = ', '.join(errors[:-1]) + ', and ' + errors[-1]
            verb = 'are'

        return 'Error: {adj}{vars} {verb} required.'.format(adj=adj,
                                                            vars=names,
                                                            verb=verb)

    def _run(self, downstream_url, upstream_url, upstream_branch_refs,
             upstream_tag_refs, upstream_tags_as_branches_refs, patch_repo_url,
             patch_branch):
        """Clone upstream, optionally validate patches, push downstream."""

        upstream_branch_refs = self._split_csv(upstream_branch_refs)
        upstream_tag_refs = self._split_csv(upstream_tag_refs)

        # Each entry here is expected to look like "tag:branch"; collect
        # them as [tag, branch] pairs.
        tag_branch_pairs = []
        for upstream_tag_branch in self._split_csv(
                upstream_tags_as_branches_refs):
            tmp_pieces = upstream_tag_branch.split(":", 2)
            tag_branch_pairs.append([tmp_pieces[0], tmp_pieces[1]])
        upstream_tags_as_branches_refs = tag_branch_pairs

        # Derive a project name from the last non-empty url segment
        # (handles urls both with and without a trailing slash).
        project = upstream_url.split('/')
        project = project[-1] or project[-2]

        self.message.reply_text("Syncing repository for project `%s`..." %
                                project,
                                threaded=True,
                                prefixed=False)

        # Make temp dir for run (cleaned up automatically on exit).
        tmp_dir_prefix = "github_sync_{}".format(project)
        with utils.make_tmp_dir(dir=self.bot.config.working_dir,
                                prefix=tmp_dir_prefix) as tmp_dir:
            # Clone the source repo
            try:
                source_repo = git.Repo.clone_from(
                    upstream_url, os.path.join(tmp_dir, 'source'))
                self.message.reply_text(
                    ":partyparrot: Successfully loaded repository `%s`." %
                    project,
                    threaded=True,
                    prefixed=False)
            except Exception:
                self.message.reply_text(
                    ":sadparrot: Failed to load repository `%s`." % project,
                    threaded=True,
                    prefixed=False)
                return

            # Now check patches, if we know what patch repo to use
            if patch_repo_url:
                self.message.reply_text(
                    "Checking patch compatibility for `%s` branch `%s`." %
                    (project, patch_branch),
                    threaded=True,
                    prefixed=False)

                # Clone the patch repo; remember where its head was so we
                # can later detect whether update-patches committed.
                patch_repo = git.Repo.clone_from(
                    patch_repo_url, os.path.join(tmp_dir, 'patches'))
                head_commit = patch_repo.head.commit.hexsha

                # Validate patches against the just-cloned source tree.
                r = process_utils.run(
                    [
                        'update-patches', '--branch-override', patch_branch,
                        '--patch-repo', patch_repo.working_dir
                    ],
                    cwd=os.path.join(tmp_dir, "source")
                )
                try:
                    r.raise_for_status()
                    self.message.reply_text(
                        ":gdhotdog: Patch compatibility check successful.",
                        threaded=True,
                        prefixed=False)
                except process_utils.ProcessExecutionError:
                    # Patches no longer apply; tell the caller and also
                    # alert the admin channel, then bail out (no push).
                    self.message.reply_text(
                        "Patch compatibility check failed. Please do a manual "
                        "rebase!",
                        threaded=True,
                        prefixed=False)
                    attachment = {
                        'text': (":warning:"
                                 " Patches are in merge conflict in the"
                                 " repository `%s`. Manual intervention"
                                 " is required!") % project,
                        'mrkdwn_in': ['text'],
                        'color':
                        su.COLORS.purple,
                    }
                    self.message.reply_attachments(
                        attachments=[attachment],
                        log=LOG,
                        as_user=True,
                        text=' ',
                        channel=self.config.admin_channel,
                        unfurl_links=True)
                    return

                # If update-patches made an auto-commit, push it to a new
                # branch and open a pull request for it.
                if patch_repo.head.commit.hexsha == head_commit:
                    self.message.reply_text("No patch updates detected.",
                                            threaded=True,
                                            prefixed=False)
                else:
                    new_branch = '{project}_{short_hash}'.format(
                        project=project,
                        short_hash=patch_repo.head.commit.hexsha[:8])
                    new_refspec = 'HEAD:{branch}'.format(branch=new_branch)
                    self.message.reply_text(
                        "Pushing patch updates to branch `{branch}`.".format(
                            branch=new_branch),
                        threaded=True,
                        prefixed=False)
                    patch_repo.remote().push(refspec=new_refspec)
                    # Convert the remote url into an owner/name slug for
                    # the github API ("git@host:owner/name.git" style).
                    patch_repo_name = patch_repo_url.split(":")[-1]
                    patch_repo_name = patch_repo_name.split('.git')[0]
                    gh_repo = self.bot.clients.github_client.get_repo(
                        patch_repo_name)
                    # First commit-message line becomes the PR title.
                    title, body = patch_repo.head.commit.message.split('\n', 1)
                    self.message.reply_text("Creating pull request...",
                                            threaded=True,
                                            prefixed=False)
                    pr = gh_repo.create_pull(title=title,
                                             body=body.strip(),
                                             base="master",
                                             head=new_branch)
                    self.message.reply_text(
                        ":gunter: Pull request created: {url}".format(
                            url=pr.html_url),
                        threaded=True,
                        prefixed=False)

            # Finish syncing the repo by pushing the new state
            self.message.reply_text("Pushing upstream state downstream...",
                                    threaded=True,
                                    prefixed=False)
            source_repo.heads.master.checkout()
            source_repo.remote().fetch()
            retval = git_utils.sync_push(
                working_folder=tmp_dir,
                target=downstream_url,
                push_tags=upstream_tag_refs,
                push_branches=upstream_branch_refs,
                push_tags_to_branches=upstream_tags_as_branches_refs)
            if retval == 0:
                self.message.reply_text(
                    ":partyparrot: Successfully pushed repository `%s`." %
                    project,
                    threaded=True,
                    prefixed=False)
            else:
                self.message.reply_text(
                    ":sadparrot: Failed to push repository `%s`." % project,
                    threaded=True,
                    prefixed=False)
                return
            self.message.reply_text(":beers: Done.",
                                    threaded=True,
                                    prefixed=False)