Example 1
    async def stan(self, ctx):
        """Get a random kpop artist to stan.

        Use >stan update to update the database.
        """
        artist_list = db.get_from_data_json(["artists"])
        if artist_list:
            await ctx.send(f"stan **{random.choice(artist_list)}**")
        else:
            await ctx.send(
                ":warning: Artist list is empty :thinking: Update it with `>stan update`"
            )
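For context: the docstring advertises a `>stan update` subcommand (Example 3), so this method presumably lives in a discord.py cog and is registered as a command group. A minimal sketch of that scaffolding, assuming the decorator and cog name (neither is shown in the source):

import random

from discord.ext import commands

import db  # project-local helper module used throughout these snippets


class Kpop(commands.Cog):  # hypothetical cog name
    def __init__(self, bot):
        self.bot = bot

    # invoke_without_command=True lets a bare ">stan" run this body while
    # ">stan update" dispatches to the subcommand from Example 3.
    @commands.group(invoke_without_command=True)
    async def stan(self, ctx):
        artist_list = db.get_from_data_json(["artists"])
        await ctx.send(f"stan **{random.choice(artist_list)}**")

    @stan.command()
    async def update(self, ctx):
        ...  # body shown in Example 3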
Example 2
    def __init__(self, bot):
        self.bot = bot
        # Matches "stfu" only as a standalone word: non-word characters or a
        # string boundary on both sides, case-insensitive. (The original built
        # this via str.format, which is inlined here with identical behavior.)
        self.stfu_regex = re.compile(r"(?:^|\W)stfu(?:$|\W)", flags=re.IGNORECASE)
        # Rotating presence messages; the lambdas defer evaluation so the
        # server and user counts are fresh each time the status changes.
        self.statuses = [
            ("watching", lambda: f"{len(self.bot.guilds)} servers"),
            ("listening", lambda: f"{len(set(self.bot.get_all_members()))} users"),
            ("playing", lambda: "misobot.xyz"),
        ]
        # These integers mirror discord.ActivityType (playing=0, streaming=1,
        # listening=2, watching=3).
        self.activities = {
            "playing": 0,
            "streaming": 1,
            "listening": 2,
            "watching": 3,
        }
        self.current_status = None
        self.status_loop.start()
        self.settings = db.get_from_data_json(["bot_settings"])
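The `status_loop` started at the end of `__init__` is not shown in the source; a minimal sketch of what such a loop could look like with `discord.ext.tasks`, assuming the interval and that `current_status` holds a rotating index (both are assumptions):

import discord
from discord.ext import tasks


class StatusRotator:
    """Sketch of a loop consuming the statuses/activities set up above."""

    @tasks.loop(minutes=3.0)  # the interval is an assumption
    async def status_loop(self):
        # Advance to the next (type, message) pair; current_status is treated
        # as an index here, which may differ from the real implementation.
        if self.current_status is None:
            i = 0
        else:
            i = (self.current_status + 1) % len(self.statuses)
        self.current_status = i
        activity_name, message = self.statuses[i]
        await self.bot.change_presence(
            activity=discord.Activity(
                # self.activities maps e.g. "watching" -> 3, matching
                # discord.ActivityType's integer values.
                type=discord.ActivityType(self.activities[activity_name]),
                name=message(),  # evaluated now, so counts are current
            )
        )

    @status_loop.before_loop
    async def before_status_loop(self):
        # Don't touch presence until the gateway connection is ready.
        await self.bot.wait_until_ready()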
Example 3
    async def update(self, ctx):
        """Update the artist database."""
        artist_list_old = db.get_from_data_json(["artists"])
        urls_to_scrape = [
            "https://kprofiles.com/k-pop-girl-groups/",
            "https://kprofiles.com/disbanded-kpop-groups-list/",
            "https://kprofiles.com/disbanded-kpop-boy-groups/",
            "https://kprofiles.com/k-pop-boy-groups/",
            "https://kprofiles.com/co-ed-groups-profiles/",
            "https://kprofiles.com/kpop-duets-profiles/",
            "https://kprofiles.com/kpop-solo-singers/",
        ]

        async def scrape(session, url):
            """Collect artist names from the profile links on one page."""
            artists = []
            async with session.get(url) as response:
                soup = BeautifulSoup(await response.text(), "html.parser")
                content = soup.find(
                    "div", {"class": "entry-content herald-entry-content"})
                for p in content.find_all("p"):
                    for link in p.find_all("a"):
                        # Strip the trailing "Profile" label from the link text.
                        artist = link.text.replace("Profile", "").replace(
                            "profile", "").strip()
                        if artist:
                            artists.append(artist)
            return artists

        tasks = []
        async with aiohttp.ClientSession() as session:
            for url in urls_to_scrape:
                tasks.append(scrape(session, url))

            # Fetch every page concurrently, then flatten the per-page lists
            # into one deduplicated set.
            artist_list_new = set(sum(await asyncio.gather(*tasks), []))

        db.save_into_data_json(["artists"], list(artist_list_new))
        # "New entries" is the change in total count, so removals can offset
        # additions; a true diff would be len(artist_list_new - set(artist_list_old)).
        await ctx.send(
            f"**Artist list updated**\n"
            f"New entries: **{len(artist_list_new) - len(artist_list_old)}**\n"
            f"Total: **{len(artist_list_new)}**")
Example 4
def useragent():
    """Return a random user agent string for web scraping requests."""
    agents = db.get_from_data_json(["useragents"])
    return random.choice(agents)
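Presumably this feeds the `User-Agent` header of scraping requests; a minimal usage sketch (the fetch helper and URL are placeholders):

import asyncio

import aiohttp


async def fetch(url):
    # Each request goes out under a randomly picked user agent string.
    async with aiohttp.ClientSession() as session:
        async with session.get(url, headers={"User-Agent": useragent()}) as response:
            return await response.text()


html = asyncio.run(fetch("https://example.com"))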
Example 5
    async def on_ready(self):
        settings = db.get_from_data_json(["bot_settings"])
        self.logchannel = self.bot.get_channel(settings["log_channel"])
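Inside a cog, `on_ready` only fires if it is registered as a listener; a minimal sketch of that registration (the cog name is an assumption):

from discord.ext import commands

import db  # project-local helper module used throughout these snippets


class Events(commands.Cog):  # hypothetical cog name
    def __init__(self, bot):
        self.bot = bot

    @commands.Cog.listener()
    async def on_ready(self):
        settings = db.get_from_data_json(["bot_settings"])
        self.logchannel = self.bot.get_channel(settings["log_channel"])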