async def gurkan_count(self, ctx: Context) -> None:
    """
    Count the members whose display name marks them as a gurkan.

    Sends an embed with the total number of gurkans in the server and
    the percentage of gurkans among all server members.
    """
    members = ctx.guild.members
    gurkans = sum(gurkan_check(member.display_name) for member in members)
    rate = round((gurkans / len(members)) * 100)

    count_emb = Embed()
    # Fix: `description` was previously unbound in the 100%/0% branches,
    # which raised NameError at the assignment below; default it explicitly.
    description = None

    if rate == 100:
        title = f"Whoa!! All {gurkans} members are gurkans!"
        color = Colours.green
    elif rate == 0:
        title = "No one is a gurkan?! That's lame."
        color = Colours.soft_red
    else:
        # RATE_DICT presumably maps range objects to a descriptive phrase,
        # hence the `rate in r` membership test — TODO confirm.
        rate_m = [RATE_DICT[r] for r in RATE_DICT if rate in r][0]
        title = f"{Emojis.cucumber_emoji} {gurkans} members"
        color = Colours.green
        description = f"About {rate}% ({gurkans}/ {len(members)}) of members are gurkans, that's {rate_m}"

    count_emb.title = title
    count_emb.color = color
    count_emb.description = description

    await ctx.send(embed=count_emb)
async def unused(self, ctx: Context):
    """Reply with an embed listing every factoid whose use count is zero."""
    by_popularity = sorted(self.factoids.values(), key=lambda entry: entry['uses'])
    # The ascending sort puts all never-used factoids first, so filtering on
    # `uses <= 0` keeps exactly the prefix the old break-loop collected.
    lines = [f'- {entry["name"]}' for entry in by_popularity if entry['uses'] <= 0]

    embed = Embed(title='Unused Factoids')
    embed.description = '```{}```'.format('\n'.join(lines))
    return await ctx.send(embed=embed)
async def get_commit_messages(self, event_body, brief=False):
    """Build a list of (embed, commit_id) tuples for a push event.

    When `brief` is set and the push exceeds the configured truncation
    limit, all but the newest commit are collapsed into a single
    "skipped" embed (carrying a compare link, with commit id ``None``)
    followed by an embed for the final commit only.
    """
    results = []
    branch = event_body['ref'].split('/', 2)[2]
    project = event_body['repository']['full_name']
    commits = event_body['commits']

    if brief and len(commits) > self.config['commit_truncation_limit']:
        # Summarise everything except the newest commit behind a diff link.
        first_hash = commits[0]['id']
        last_hash = commits[-2]['id']
        compare_url = f'https://github.com/{project}/compare/{first_hash}^...{last_hash}'
        skip_embed = Embed(
            title=f'Skipped {len(commits) - 1} commits... (click link for diff)',
            colour=Colour(self._skipped_commit_colour),
            url=compare_url)
        results.append((skip_embed, None))
        commits = commits[-1:]

    for commit in commits:
        author_username = commit['author'].get('username', None)
        author_name = commit['author'].get('name', None)
        timestamp = dateutil.parser.parse(commit['timestamp'])
        message_lines = commit['message'].split('\n')

        embed = Embed(title=message_lines[0],
                      colour=Colour(self._commit_colour),
                      url=commit['url'],
                      timestamp=timestamp)

        # Full (non-brief) embeds carry the commit body (everything after
        # the subject and blank line), capped at the 4096-char embed limit.
        if len(message_lines) > 2 and not brief:
            embed.description = '\n'.join(message_lines[2:])[:4096]

        author = await self.get_author_info(author_username)
        if author:
            if author['name'] and author['name'] != author['login']:
                author_name = f'{author["name"]} ({author["login"]})'
            else:
                author_name = author['login']
            embed.set_author(name=author_name,
                             url=author['html_url'],
                             icon_url=author['avatar_url'])
        elif author_name:
            embed.set_author(name=author_name)
        else:
            embed.set_author(name='<No Name>')

        embed.set_footer(text='Commit')
        embed.add_field(name='Repository', value=project, inline=True)
        embed.add_field(name='Branch', value=branch, inline=True)
        results.append((embed, commit['id']))

    return results
async def bottom(self, ctx: Context):
    """Reply with an embed showing the ten least-used factoids."""
    least_used = sorted(self.factoids.values(), key=lambda entry: entry['uses'])[:10]

    lines = [
        'Pos - Factoid (uses)',
        '--------------------------------',
    ]
    lines.extend(
        f'{rank:2d}. - {entry["name"]} ({entry["uses"]})'
        for rank, entry in enumerate(least_used, start=1)
    )

    embed = Embed(title='Least used Factoids')
    embed.description = '```{}```'.format('\n'.join(lines))
    return await ctx.send(embed=embed)
async def get_pr_messages(self, event_body):
    """Build (brief_embed, full_embed) for a pull-request webhook event.

    The brief embed carries only title/author/repository metadata; the
    full embed additionally includes the PR body, with HTML template
    comments (`<!-- ... -->`) stripped and trimmed to Discord limits.
    """
    pr = event_body['pull_request']
    pr_number = event_body['number']
    title = pr['title']
    timestamp = dateutil.parser.parse(pr['created_at'])

    embed = Embed(title=f'#{pr_number}: {title}',
                  colour=Colour(self._pull_request_colour),
                  url=pr['html_url'],
                  timestamp=timestamp)

    author_name = pr['user']['login']
    author = await self.get_author_info(author_name)
    if author and author['name'] and author['name'] != author['login']:
        author_name = f'{author["name"]} ({author["login"]})'

    embed.set_author(name=author_name,
                     url=pr['user']['html_url'],
                     icon_url=pr['user']['avatar_url'])
    embed.set_footer(text='Pull Request')
    embed.add_field(name='Repository',
                    value=event_body['repository']['full_name'],
                    inline=True)

    # create copy without description text for brief channel
    brief_embed = embed.copy()

    # filter out comments in template
    # Fix: GitHub sends `"body": null` for PRs opened without a description,
    # which previously crashed on `.splitlines()` — treat null as empty.
    pr['body'] = '\n'.join(
        line.strip() for line in (pr['body'] or '').splitlines()
        if not line.startswith('<!-'))

    # trim message to discord limits
    if len(pr['body']) >= 2048:
        embed.description = pr['body'][:2000] + ' [... message trimmed]'
    else:
        embed.description = pr['body']

    return brief_embed, embed
async def get_issue_messages(self, event_body):
    """Build (brief_embed, full_embed) for an issue webhook event.

    The brief embed carries only title/author/repository metadata; the
    full embed additionally includes the issue body, with HTML template
    comments stripped, issue-form double newlines collapsed, and the
    text trimmed to Discord limits.
    """
    issue = event_body['issue']
    issue_number = issue['number']
    title = issue['title']
    timestamp = dateutil.parser.parse(issue['created_at'])

    embed = Embed(title=f'#{issue_number}: {title}',
                  colour=Colour(self._issue_colour),
                  url=issue['html_url'],
                  timestamp=timestamp)

    author_name = issue['user']['login']
    author = await self.get_author_info(author_name)
    if author and author['name'] and author['name'] != author['login']:
        author_name = f'{author["name"]} ({author["login"]})'

    embed.set_author(name=author_name,
                     url=issue['user']['html_url'],
                     icon_url=issue['user']['avatar_url'])
    embed.set_footer(text='Issue')
    embed.add_field(name='Repository',
                    value=event_body['repository']['full_name'],
                    inline=True)

    # create copy without description text for brief channel
    brief_embed = embed.copy()

    # Fix: GitHub sends `"body": null` for issues opened without any text,
    # which previously crashed on `.splitlines()` — treat null as empty.
    issue['body'] = '\n'.join(
        line.strip() for line in (issue['body'] or '').splitlines()
        if not line.startswith('<!-'))

    issue_text = issue['body']
    # strip double-newlines from issue forms
    if '\n\n' in issue_text:
        issue_text = issue_text.replace('\n\n', '\n')

    if len(issue_text) >= 2048:
        embed.description = issue_text[:2000] + ' [... message trimmed]'
    else:
        embed.description = issue_text

    return brief_embed, embed
async def get_discussion_messages(self, event_body):
    """Build (brief_embed, full_embed) for a discussion webhook event.

    The brief embed carries only title/category/author/repository
    metadata; the full embed additionally includes the discussion body,
    with HTML template comments stripped and trimmed for Discord.
    """
    discussion = event_body['discussion']
    discussion_number = discussion['number']
    title = discussion['title']
    category = discussion['category']['name']
    timestamp = dateutil.parser.parse(discussion['created_at'])

    embed = Embed(title=f'#{discussion_number}: {category} - {title}',
                  colour=Colour(self._discussion_colour),
                  timestamp=timestamp,
                  url=discussion['html_url'])

    author_name = discussion['user']['login']
    author = await self.get_author_info(author_name)
    if author and author['name'] and author['name'] != author['login']:
        author_name = f'{author["name"]} ({author["login"]})'

    embed.set_author(name=author_name,
                     url=discussion['user']['html_url'],
                     icon_url=discussion['user']['avatar_url'])
    embed.set_footer(text='Discussion')
    embed.add_field(name='Repository',
                    value=event_body['repository']['full_name'],
                    inline=True)

    # create copy without description text for brief channel
    brief_embed = embed.copy()

    # Fix: GitHub may send `"body": null`, which previously crashed on
    # `.splitlines()` — treat null as empty before filtering comments.
    discussion['body'] = '\n'.join(
        line.strip() for line in (discussion['body'] or '').splitlines()
        if not line.startswith('<!-'))

    # The trimmed variant can slightly exceed 1024 chars because of the
    # suffix; that is still well within the 4096-char description limit.
    if len(discussion['body']) >= 1024:
        embed.description = discussion['body'][:1024] + ' [... message trimmed]'
    else:
        embed.description = discussion['body']

    return brief_embed, embed
async def fetch_xkcd_comics(self, ctx: Context, comic: Optional[str]) -> None:
    """
    Get an xkcd comic's information along with the image.

    To get a random comic, don't type any number as an argument.
    To get the latest, type 'latest'.
    """
    # Start with the error colour; the success path (not visible in this
    # chunk) presumably re-populates the embed — TODO confirm.
    embed = Embed(title=f"XKCD comic '{comic}'")
    embed.colour = Colours.soft_red
    # The walrus re-binds `comic` to the regex match object (or None);
    # a non-match means the argument was neither an integer nor 'latest'.
    if comic and (comic := re.match(COMIC_FORMAT, comic)) is None:
        embed.description = (
            "Comic parameter should either be an integer or 'latest'.")
        await ctx.send(embed=embed)
        return
async def get_wiki_message(self, event_body):
    """Build one embed summarising every wiki page change in the event."""
    sender = event_body['sender']

    embed = Embed(colour=Colour(self._wiki_colour))
    embed.set_footer(text='GitHub Wiki Changes')

    # All edits in the response are from a single author.
    author_name = sender['login']
    author = await self.get_author_info(author_name)
    if author and author['name'] and author['name'] != author['login']:
        author_name = f'{author["name"]} ({author["login"]})'

    embed.set_author(name=author_name,
                     url=sender['html_url'],
                     icon_url=sender['avatar_url'])
    embed.add_field(name='Repository',
                    value=event_body['repository']['full_name'])

    # One markdown line per changed page, each linking the page revision
    # and a single-revision diff.
    change_lines = []
    for page in event_body['pages']:
        diff_url = f'{page["html_url"]}/_compare/{page["sha"]}^...{page["sha"]}'
        page_url = f'{page["html_url"]}/{page["sha"]}'
        change_lines.append(
            f'**{page["action"]}:** [{page["title"]}]({page_url}) [[diff]({diff_url})]'
        )
    embed.description = '\n'.join(change_lines)

    return embed
async def build_update(self, run_data=None):
    """Post embeds for Steam builds newly assigned to branches.

    Compares the current build->branch assignments against the map saved
    in bot state, announces each build on the branches it is new to, and
    persists the fresh map. `run_data` (optional) is metadata from the CI
    run that triggered this update — presumably a GitHub Actions workflow
    run payload (`event`, `head_branch`); TODO confirm against caller.
    """
    if not self.steam_channel:
        logger.info('No Steam channel found, skipping build update')
        return
    recent_builds = await self.get_builds()
    branches = await self.get_branches()
    # build map from id to branch
    # don't do this, builds can have multiple branches!
    id_branch_map = defaultdict(set)
    for branch_name, branch in branches.items():
        id_branch_map[branch['BuildID']].add(branch_name)
    # Previously-announced branch -> build id mapping from persisted state.
    build_map = self.bot.state['last_build_map']
    new_build_map = dict()
    # iterate over builds, post info on all unseen builds that are on branches
    for build_id, build_info in sorted(recent_builds.items(),
                                       key=lambda a: int(a[0])):
        # for some reason, the build_id is a string here but int everywhere else
        build_id = int(build_id)
        # Skip builds not currently assigned to any branch.
        if build_id not in id_branch_map:
            continue
        current_branches = id_branch_map[build_id]
        # if build is not new to this branch, skip
        previous_branches = set(branch
                                for branch, old_build_id in build_map.items()
                                if old_build_id == build_id)
        # copy/write to new map
        for branch in current_branches:
            new_build_map[branch] = build_id
        if previous_branches == current_branches:
            # no changes
            continue
        # we only care about branches the build is "new" to
        current_branches -= previous_branches
        # if no branches are left build was removed from a previous one, but not added to a new one
        if not current_branches:
            continue
        description = build_info['Description']
        embed = Embed(title='New Build pushed to branch',
                      colour=STEAMWORKS_COLOUR)
        embed.add_field(name='Build ID', value=build_id)
        embed.add_field(name='Branch(es)',
                        value=', '.join(sorted(current_branches)))
        embed.add_field(name='Description', value=description)
        # If update was triggered by webhook, add extra info to the corresponding commit (if possible).
        # Also add info for nightly builds which include a commit hash in the description
        if 'nightly-g' in description:
            # fetch commit metadata based on description
            # Short hash is everything after the last 'g' in the description.
            shorthash = description.rpartition('g')[2]
            commit_info = await self.get_with_retry(
                f'{GITHUB_API_URL}/repos/{self.config["repo"]}'
                f'/commits/{shorthash}')
            base_name = commit_info['commit']['message'].partition('\n')[0]
            base_url = commit_info['html_url']
            embed.add_field(name='Trigger',
                            value='cronjob/manual run'
                            if not run_data else run_data['event'])
            embed.description = f'Based on Commit [{base_name}]({base_url})'
        elif run_data and run_data['event'] == 'release':
            release_tag = run_data['head_branch']
            # check if build matches release tag
            if description.endswith(release_tag):
                release_info = await self.get_with_retry(
                    f'{GITHUB_API_URL}/repos/{self.config["repo"]}'
                    f'/releases/tags/{release_tag}')
                # fetch release metadata?
                base_type = 'Pre-Release' if release_info[
                    'prerelease'] else 'Release'
                base_name = release_info['name']
                base_url = release_info['html_url']
                embed.add_field(name='Trigger', value='release')
                embed.description = f'Based on {base_type} [{base_name}]({base_url})'
        row = ActionRow()
        row.add_button(
            style=ButtonStyle.link,
            label='Manage builds',
            url=f'https://partner.steamgames.com/apps/builds/{self.config["app_id"]}')
        row.add_button(
            style=ButtonStyle.link,
            label='Build details',
            url=f'https://partner.steamgames.com/apps/builddetails/{self.config["app_id"]}/{build_id}')
        # if it's a known staging branch, offer the push-to-live button
        for current_branch in current_branches:
            target_branch = self.config['branches'].get(current_branch)
            if target_branch and target_branch not in current_branches:
                row.add_button(
                    label=f'Push to "{target_branch}" branch',
                    style=ButtonStyle.danger,
                    custom_id=f'steamworks_{build_id}_{target_branch}')
        await self.steam_channel.send(embed=embed, components=row)
    # Persist the fresh branch -> build mapping for the next run.
    self.bot.state['last_build_map'] = new_build_map
def error_embed(message: str, title: Optional[str] = None) -> Embed:
    """Build a basic embed with red colour and either a random error title or a title provided."""
    chosen_title = title or random.choice(ERROR_REPLIES)
    return Embed(colour=Colours.soft_red, title=chosen_title, description=message)
async def paginate(
    cls,
    lines: t.List[str],
    ctx: Context,
    embed: disnake.Embed,
    prefix: str = "",
    suffix: str = "",
    max_lines: t.Optional[int] = None,
    max_size: int = 500,
    scale_to_size: int = 2000,
    empty: bool = False,
    restrict_to_user: User = None,
    timeout: int = 300,
    footer_text: str = None,
    url: str = None,
    allow_empty_lines: bool = False,
) -> t.Optional[disnake.Message]:
    """
    Use a paginator and set of reactions to provide pagination over a set of lines.

    When used, this will send a message using `ctx.send()` and apply the pagination reactions,
    to control the embed. Pagination will also be removed automatically if no reaction is added
    for `timeout` seconds.

    The interaction will be limited to `restrict_to_user` (ctx.author by default) or to any
    user with a moderation role.

    Example:
    >>> people = ["Guido van Rossum", "Linus Torvalds", "Gurkbot", "Bjarne Stroustrup"]
    >>> e = disnake.Embed()
    >>> e.set_author(name="Ain't these people just awesome?")
    >>> await LinePaginator.paginate(people, ctx, e)
    """

    def event_check(reaction_: disnake.Reaction,
                    user_: disnake.Member) -> bool:
        """Make sure that this reaction is what we want to operate on."""
        # `message` and `restrict_to_user` are captured from the enclosing
        # scope; both are assigned before the first `wait_for` call runs.
        return (
            # Conditions for a successful pagination:
            all((
                # Reaction is on this message
                reaction_.message.id == message.id,
                # Reaction is one of the pagination emotes
                str(reaction_.emoji) in PAGINATION_EMOJI,
                # Reaction was not made by the Bot
                user_.id != ctx.bot.user.id,
                # The reaction was by a whitelisted user
                user_.id == restrict_to_user.id,
            )))

    paginator = cls(
        prefix=prefix,
        suffix=suffix,
        max_size=max_size,
        max_lines=max_lines,
        scale_to_size=scale_to_size,
    )
    current_page = 0

    # If the `restrict_to_user` is empty then set it to the original message author.
    restrict_to_user = restrict_to_user or ctx.author

    if not lines:
        if not allow_empty_lines:
            # NOTE(review): `logger.exception` outside an `except` block logs
            # without a useful traceback — presumably `logger.error` was
            # intended; confirm before changing.
            logger.exception(
                "`Empty lines found, raising error as `allow_empty_lines` is `False`."
            )
            raise EmptyPaginatorEmbed("No lines to paginate.")

        logger.debug(
            "Empty lines found, `allow_empty_lines` is `True`, adding 'nothing to display' as content."
        )
        lines.append("(nothing to display)")

    for line in lines:
        try:
            paginator.add_line(line, empty=empty)
        except Exception:
            logger.exception(f"Failed to add line to paginator: '{line}'.")
            raise

    logger.debug(f"Paginator created with {len(paginator.pages)} pages.")

    # Set embed description to content of current page.
    embed.description = paginator.pages[current_page]

    if len(paginator.pages) <= 1:
        # Single page: send the embed without reactions and finish early.
        if footer_text:
            embed.set_footer(text=footer_text)
        if url:
            embed.url = url
        logger.debug("Less than two pages, skipping pagination.")
        await ctx.send(embed=embed)
        return
    else:
        if footer_text:
            embed.set_footer(
                text=
                f"{footer_text} (Page {current_page + 1}/{len(paginator.pages)})"
            )
        else:
            embed.set_footer(
                text=f"Page {current_page + 1}/{len(paginator.pages)}")
        if url:
            embed.url = url
        message = await ctx.send(embed=embed)

    logger.debug("Adding emoji reactions to message...")
    for emoji in PAGINATION_EMOJI:
        # Add all the applicable emoji to the message
        await message.add_reaction(emoji)
    logger.debug("Successfully added all pagination emojis to message.")

    # React-driven pagination loop; exits on timeout or delete reaction.
    while True:
        try:
            reaction, user = await ctx.bot.wait_for("reaction_add",
                                                    timeout=timeout,
                                                    check=event_check)
            logger.trace(f"Got reaction: {reaction}.")
        except asyncio.TimeoutError:
            logger.debug("Timed out waiting for a reaction.")
            break  # We're done, no reactions for the last 5 minutes

        if str(reaction.emoji) == DELETE_EMOJI:
            logger.debug("Got delete reaction.")
            await message.delete()
            return

        if reaction.emoji == FIRST_EMOJI:
            await message.remove_reaction(reaction.emoji, user)
            current_page = 0
            logger.debug(
                f"Got first page reaction - changing to page 1/{len(paginator.pages)}."
            )
            embed.description = paginator.pages[current_page]
            if footer_text:
                # Current page is zero index based.
                embed.set_footer(
                    text=
                    f"{footer_text} (Page {current_page + 1}/{len(paginator.pages)})"
                )
            else:
                embed.set_footer(
                    text=f"Page {current_page + 1}/{len(paginator.pages)}")
            await message.edit(embed=embed)

        if reaction.emoji == LAST_EMOJI:
            await message.remove_reaction(reaction.emoji, user)
            current_page = len(paginator.pages) - 1
            logger.debug(
                f"Got last page reaction - changing to page {current_page + 1}/{len(paginator.pages)}"
            )
            embed.description = paginator.pages[current_page]
            if footer_text:
                embed.set_footer(
                    text=
                    f"{footer_text} (Page {current_page + 1}/{len(paginator.pages)})"
                )
            else:
                embed.set_footer(
                    text=f"Page {current_page + 1}/{len(paginator.pages)}")
            await message.edit(embed=embed)

        if reaction.emoji == LEFT_EMOJI:
            await message.remove_reaction(reaction.emoji, user)
            if current_page <= 0:
                logger.debug(
                    "Got previous page reaction while they are on the first page, ignoring."
                )
                continue
            current_page -= 1
            logger.debug(
                f"Got previous page reaction - changing to page {current_page + 1}/{len(paginator.pages)}"
            )
            embed.description = paginator.pages[current_page]
            if footer_text:
                embed.set_footer(
                    text=
                    f"{footer_text} (Page {current_page + 1}/{len(paginator.pages)})"
                )
            else:
                embed.set_footer(
                    text=f"Page {current_page + 1}/{len(paginator.pages)}")
            await message.edit(embed=embed)

        if reaction.emoji == RIGHT_EMOJI:
            await message.remove_reaction(reaction.emoji, user)
            if current_page >= len(paginator.pages) - 1:
                logger.debug(
                    "Got next page reaction while they are on the last page, ignoring."
                )
                continue
            current_page += 1
            logger.debug(
                f"Got next page reaction - changing to page {current_page + 1}/{len(paginator.pages)}"
            )
            embed.description = paginator.pages[current_page]
            if footer_text:
                embed.set_footer(
                    text=
                    f"{footer_text} (Page {current_page + 1}/{len(paginator.pages)})"
                )
            else:
                embed.set_footer(
                    text=f"Page {current_page + 1}/{len(paginator.pages)}")
            await message.edit(embed=embed)

    logger.debug("Ending pagination and clearing reactions.")
    with suppress(disnake.NotFound):
        await message.clear_reactions()
async def on_message(self, msg: Message):
    """Detect OBS log uploads/links in a message, analyse the first valid
    one, and reply with an embed summarising the analyser's findings."""
    # check if channel is in blacklist, has possible log urls, or an attachment
    if msg.channel.id in self.channel_blacklist:
        return
    if not msg.attachments and not any(lh in msg.content
                                       for lh in self._log_hosts):
        return
    # list of candidate tuples consisting of (raw_url, web_url)
    # NOTE(review): despite the comment above, only plain URL strings are
    # appended below — confirm the comment is stale.
    log_candidates = []
    # message attachments
    for attachment in msg.attachments:
        if attachment.url.endswith('.txt'):
            # collisions are possible here, but unlikely, we'll see if it becomes a problem
            if not self.limiter.is_limited(attachment.filename):
                log_candidates.append(attachment.url)
            else:
                logger.debug(
                    f'{msg.author} attempted to upload a rate-limited log.'
                )
    # links in message
    for part in [p.strip() for p in msg.content.split()]:
        if any(part.startswith(lh) for lh in self._log_hosts):
            # Normalise paste-site links to their raw-content URLs.
            if 'obsproject.com' in part:
                url = part
            elif 'hastebin.com' in part:
                hastebin_id = part.rsplit('/', 1)[1]
                if not hastebin_id:
                    continue
                url = f'https://hastebin.com/raw/{hastebin_id}'
            elif 'pastebin.com' in part:
                pastebin_id = part.rsplit('/', 1)[1]
                if not pastebin_id:
                    continue
                url = f'https://pastebin.com/raw/{pastebin_id}'
            else:
                continue
            # Supporters bypass the rate limiter.
            if self.bot.is_supporter(
                    msg.author) or not self.limiter.is_limited(url):
                log_candidates.append(url)
            else:
                logger.debug(
                    f'{msg.author} attempted to post a rate-limited log.')
    if not log_candidates:
        return
    if len(log_candidates) > 3:
        logger.debug('Too many log url candidates, limiting to first 3')
        log_candidates = log_candidates[:3]

    async def react(emote):
        # Best-effort reaction; failures are logged, never raised.
        try:
            await msg.add_reaction(emote)
        except Exception as e:
            # NOTE(review): the message below is missing a closing quote —
            # harmless, but worth fixing alongside a behavioral change.
            logger.warning(f'Adding reaction failed with "{repr(e)}')

    # Try candidates in order until one downloads as a valid OBS log;
    # the for/else `return` fires only when no candidate succeeded.
    for log_url in log_candidates:
        # download log for local analysis
        try:
            log_content = await self.download_log(log_url)
            break
        except ValueError:  # not a valid OBS log
            continue
        except (ClientResponseError, TimeoutError):  # file download failed
            logger.error(f'Failed retrieving log from "{log_url}"')
            await react(self._log_download_failed)
        except Exception as e:  # catch everything else
            logger.error(
                f'Unhandled exception when downloading log: {repr(e)}')
    else:
        return

    async with msg.channel.typing():
        log_analysis = None
        try:
            # fetch log analysis from OBS analyser
            log_analysis = await self.fetch_log_analysis(log_url)
        except ValueError:
            logger.error(f'Analyser result for "{log_url}" is invalid.')
        except ClientResponseError:  # file download failed
            logger.error(
                f'Failed retrieving log analysis from "{log_url}"')
        except TimeoutError:  # analyser failed to respond
            logger.error(f'Analyser timed out for log file "{log_url}"')
        except Exception as e:  # catch everything else
            logger.error(
                f'Unhandled exception when analysing log: {repr(e)}')
        finally:
            # Any failure path leaves log_analysis falsy: react and bail.
            if not log_analysis:
                return await react(self._log_analyser_failed)

        anal_url = f'https://obsproject.com/tools/analyzer?log_url={urlencode(log_url)}'
        embed = Embed(colour=Colour(0x5a7474), url=anal_url)

        def pretty_print_messages(msgs):
            # Render analyser messages as a markdown bullet list.
            ret = []
            for _msg in msgs:
                ret.append(f'- {_msg}')
            return '\n'.join(ret)

        # NOTE(review): the field names below look mojibake-mangled
        # (originally emoji?) — verify against the original file encoding.
        if log_analysis['critical']:
            embed.add_field(name="đ Critical",
                            value=pretty_print_messages(
                                log_analysis['critical']))
        if log_analysis['warning']:
            embed.add_field(name="â ī¸ Warning",
                            value=pretty_print_messages(
                                log_analysis['warning']))
        if log_analysis['info']:
            embed.add_field(name="âšī¸ Info",
                            value=pretty_print_messages(
                                log_analysis['info']))

        # do local hardware check/stats collection and include results if enabled
        hw_results = await self.match_hardware(log_content)
        if self.bot.state.get('hw_check_enabled', False):
            if hardware_check_msg := self.hardware_check(hw_results):
                embed.add_field(name='Hardware Check',
                                inline=False,
                                value=' / '.join(hardware_check_msg))

        # include filtered log in case SE or FTL spam is detected
        if 'obsproject.com' in log_url and any(
                elem in log_content for elem in self._filtered_log_needles):
            clean_url = log_url.replace('obsproject.com', 'obsbot.rodney.io')
            embed.description = f'*Log contains debug messages (browser/ftl/etc), ' \
                                f'for a filtered version [click here]({clean_url})*\n'

        row = ActionRow()
        row.add_button(style=ButtonStyle.link,
                       label='Solutions / Full Analysis',
                       url=anal_url)
        return await msg.channel.send(embed=embed,
                                      reference=msg,
                                      mention_author=True,
                                      components=row)