示例#1
0
 async def gun(self, ctx):
     """Reply with a surviv.io gun's stats scraped from the fandom wiki.

     Expects exactly ``self.args`` arguments after the command; the first
     is the gun name. An unknown name yields an embed listing valid guns.
     """
     args = ctx.message.content.split()
     prefix = '$'
     msg = f'**Argument #1**: Gun Name \n**Example**: `{prefix}gun scar-h`\n**NOTE**: Typing an incorrect gun will result in a list of valid guns.'
     arg_count = len(args) - 1
     if arg_count != self.args:
         await ctx.send(f'**{self.name}** command only takes an argument count of **{self.args}**\n{msg}')
         return
     gun_name = args[1]
     # Reset per-invocation state: a previous call may have left this True,
     # which made every later lookup "succeed" with a stale gun name.
     self.in_guns = False
     async with aiohttp.ClientSession() as session:
         async with session.get(self.url) as r:
             if r.status != 200:
                 # was a fire-and-forget ctx.send (missing await) that then
                 # fell through and parsed the failed response anyway
                 await ctx.send('Bad Web Request to the surviv.io API.')
                 return
             unparsed = await r.read()
     try:
         rows = soupify(unparsed, 'html.parser').find_all('table', {'class': 'article-table'})[1].find_all('tr')
     except IndexError:
         # wiki layout changed: the second article-table is missing
         await ctx.send("Looks like the bot couldn't find what you were looking for.")
         return
     # Keep the first anchor of each row; rows without one are dropped.
     anchors = (row.find('a') for row in rows)
     guns = {a.text: a['href'] for a in anchors if a is not None}
     # Entries before 'M9' are table headers / non-gun rows; drop them.
     gun_dict = guns.copy()
     for name in guns:
         if name == 'M9':
             break
         del gun_dict[name]
     act_gun = None
     for name in gun_dict:
         if gun_name.lower() == name.lower():
             act_gun = name
             self.in_guns = True
     if not self.in_guns:
         big_concat = ', '.join(gun_dict.keys())
         embed = discord.Embed(description=f'**"{gun_name}"** is not a valid gun in **surviv.io**. \n \n  **Valid Guns**: ' + big_concat, color=0x00b037)
         await ctx.send(embed=embed)
     else:
         async with aiohttp.ClientSession() as session:
             async with session.get('https://survivio.fandom.com/wiki/' + act_gun) as r:
                 content = await r.read()
         html = soupify(content, 'html.parser')
         fire_delay = html.find('div', {'data-source': 'fireDelay'}).text
         rel_time = html.find('div', {'data-source': 'reloadTime'}).text
         spread = html.find('div', {'data-source': 'shotSpread'}).text
         damage = html.find('div', {'data-source': 'dmg'}).text
         embed = discord.Embed(title= f"{act_gun}'s Stats", description=f'**Bullet Damage**: {damage} \n **Shot Spread**: {spread} \n **Reload Time**: {rel_time} \n **Firing Delay**: {fire_delay}', color=0x00b037)
         await ctx.send(embed=embed)
示例#2
0
def get_ipa(bot, nick, chan, arg):
    """ ipa word [lang] -> Get the IPA for a word (in language lang) on wiktionary """
    # No argument at all: reply with the command's usage string.
    if not arg:
        return bot.msg(chan, get_doc())

    words = arg.split()
    term = words[0]
    # Optional second token selects the language section; default English.
    lang = words[1] if len(words) > 1 else "English"

    url = ("http://en.wiktionary.org/w/api.php?format=json&action=parse&prop=text&page=%s&redirects="
        % (term))

    payload = requests.get(url, headers=headers).json()

    if payload.get("error", None):
        return bot.msg(chan, "%s: Error: %s" % (nick, payload['error']['info']))

    page = soupify(payload['parse']['text']['*'])
    try:
        # Language section heading -> first following IPA span.
        ipa_span = page.find("span", {"id": lang.title()}).findNext("span", {"class": "IPA"})
    except AttributeError:
        # find() returned None: the page has no section for that language.
        traceback.print_exc()
        return bot.msg(chan, "%s: Could not find language in page." % (nick))

    # Skip IPA spans that belong to a "Rhymes" list rather than pronunciation.
    if not ipa_span or 'rhymes' in ipa_span.find_previous("li").text.lower():
        return bot.msg(chan, "%s: Error: could not find the IPA" % (nick))

    output = "%s: IPA of %s in language %s: %s" % (nick, term, lang, ipa_span.text)
    bot.msg(chan, output)
    return output
示例#3
0
 async def update(self, ctx):
     """Post the current surviv.io news entry as an embed."""
     guild_id = ctx.message.guild.id
     # Per-guild command prefix is stored in servers.db.
     conn = await aiosqlite.connect("servers.db")
     try:
         c = await conn.cursor()
         await c.execute("select prefix from servers where name = ?", [str(guild_id)])
         prefix = (await c.fetchall())[0][0]
     finally:
         # original leaked the connection on every call
         await conn.close()
     msg = f"**Example**: `{prefix}update`"
     args = ctx.message.content.split()
     arg_count = len(args) - 1
     if arg_count != self.args:
         await ctx.send(
             f"**{self.name}** command only takes an argument count of **{self.args}** \n{msg}"
         )
         return
     try:
         async with aiohttp.ClientSession() as session:
             async with session.get(self.url) as r:
                 raw = await r.read()
     except aiohttp.ClientError:
         # was: ctx.send without await (never delivered), then fell through
         # with `raw` unbound and crashed on soupify
         await ctx.send(
             "Failed to connect to surviv.io website. Log an issue with the issue command"
         )
         return
     html = soupify(raw, "html.parser")
     news_wrapper = html.find("div", {"id": "news-current"})
     date = news_wrapper.find("small").text
     title = news_wrapper.find("strong").text
     title = f"⏫ {title} ({date}) ⏫"
     desc = news_wrapper.findAll("p")[1].text
     embed = discord.Embed(title=title, description=desc, color=0x00B037)
     await ctx.send(embed=embed)
示例#4
0
 def fill_metadata(self):
     """Fetch the submissions page and cache its parsed soup on ``self.p``.

     :raises: IOError -- for any non-2xx response.
     """
     response = requests.get("%s&page=submissions" % (self.url))
     family = response.status_code // 100
     if family == 2:
         self.p = soupify(response.content, features="html.parser")
         return
     # Non-success: classify by status family and raise.
     if family == 4:
         label = "Client Error"
     elif family == 5:
         label = "Server Error"
     else:
         label = "Unidentified Error"
     raise IOError("%s %s" % (label, response.status_code))
示例#5
0
def wikipedia_get(bot, nick, chan, arg, root=None):
    """ wiki <page> -> Get the first two sentences in a wikipedia article. """
    if not arg:
        return bot.msg(chan, get_doc())
    # (dead `name = arg.split()[0]` computation removed: it was never used)
    term = urlencode(arg.replace(" ", "_"))

    url = root or "http://en.wikipedia.org"
    url += ("/w/api.php?action=parse&format=json&prop=text&page=%s&redirects=" % (term))

    res = requests.get(url, headers=headers).json()
    if res.get("error", None):
        return bot.msg(chan, "%s: Error: %s" % (nick, res['error']['info']))

    soup = soupify(res['parse']['text']['*'], "lxml")
    paragraph = soup.find('p')
    url = "http://en.wikipedia.org/wiki/%s"
    htmlurl = url % term

    # Highlight the bolded terms (usually the article title) in the output.
    for i in soup.find_all('b'):
        i.string = "%s" % (bot.hicolor(i.text))

    if soup.find("table", id="disambigbox") is not None:
        bot.msg(chan, "%s (%s) points to a disambiguation page." % (arg, shorten(htmlurl)))
        return

    # Follow redirects; a redirect to a fragment means the lead paragraph
    # lives under that anchor rather than at the top of the page.
    if res['parse'].get('redirects', None):
        if res['parse']['redirects'][0].get("tofragment", None):
            anchor = res['parse']['redirects'][0]['tofragment']
            paragraph = soup.find("span", {"id": anchor}).findNext("p")
            htmlurl = url % (res['parse']['redirects'][0]['to']) + "#%s" % (anchor)
        elif res['parse']['redirects'][0].get("to", None):
            htmlurl = url % (res['parse']['redirects'][0]['to'].replace(" ", "_"))
    sentences = bot.state.data["sentence_re"].findall(paragraph.text)[:2]
    readmore = bot.style.color("\u2192 %s\x0f" % (bot.state.data['shortener'](bot, htmlurl)), color="blue")
    # Strip footnote markers like "[1]"; raw string ("\[" is an invalid
    # escape in a plain literal) and the pre-check re.search was redundant.
    text = re.sub(r"\[\d+\]", "", ''.join(sentences))
    output = "%s %s" % (text, readmore)

    bot.msg(chan, '\n'.join(lines(output)))
    # (dead trailing `time.time()` call removed: result was discarded)
示例#6
0
    def get_text(self):
        """Download every page of the story and cache the story-body divs as strings."""
        if not self.num_pages:
            self.fill_metadata()
        if not self.text:
            pages = []
            for page_no in range(1, self.num_pages + 1):
                print("Handling (%s) page %d/%d" %
                      (self.get_title(), page_no, self.num_pages))
                response = requests.get("%s?page=%s" % (self.url, page_no))
                pages.append(soupify(response.content, features="html.parser"))

            # WARNING: hard coded class name
            tofind = 'b-story-body-x x-r15'
            self.text = [str(page.find('div', {'class': tofind}))
                         for page in pages]
        return self.text
示例#7
0
    def cache_first_page(self):
        '''
        Caches the soup of the story's first page on ``self.fp``.

        You don't even understand how f*****g useful this is.

        :raises: IOError -- for any non-2xx response.
        '''
        response = requests.get(self.url)
        family = response.status_code // 100
        if family == 2:
            self.fp = soupify(response.content, features="html.parser")
            return
        # Non-success: classify by status family and raise.
        if family == 4:
            label = "Client Error"
        elif family == 5:
            label = "Server Error"
        else:
            label = "Unidentified Error"
        raise IOError("%s %s" % (label, response.status_code))
示例#8
0
File: weather.py  Project: bqv/sadaharu
def getweather(location):
    """Yield formatted weather reports for *location*.

    Looks the location up on woeid.rosselliot.co.nz to get a WOEID, then
    builds a Weather object from the first match. Best-effort: any scrape
    or parse failure yields a single "not found" message instead of raising.
    """
    try:
        url = "http://woeid.rosselliot.co.nz/lookup/"
        qry = urlencode(location.lower().replace(",", " "))
        rsp = requests.get(url + qry)
        bsp = soupify(rsp.text, "lxml")
        tbl = bsp.findAll("table")[0]
        # skip the header row; each remaining row's attrs describe a place
        ids = [x.attrs for x in list(tbl)[1:]]

        # Only the first match is reported.
        for x in ids[0:1]:
            w = Weather(x)
            if 'condition' in w.__dict__.keys():
                yield w.form()
            else:
                print(w.__dict__)
    except Exception:
        # deliberate best-effort catch-all: network errors, empty result
        # tables and parse failures all become a "not found" reply
        yield "NoSuchPlaceError: '%s' not found (on earth)" % (location)
示例#9
0
 async def updates(self, ctx):
     """Post the latest surviv.io news post as an embed."""
     prefix = '$'
     msg = f'**Example**: `{prefix}updates`'
     args = ctx.message.content.split()
     arg_count = len(args) - 1
     if arg_count != self.args:
         await ctx.send(
             f'**{self.name}** command only takes an argument count of **{self.args}** \n{msg}'
         )
         return
     try:
         async with aiohttp.ClientSession() as session:
             async with session.get(self.url) as r:
                 raw = await r.read()
     except aiohttp.ClientError:
         # was: ctx.send without await (never delivered), then fell through
         # with `raw` unbound and crashed on soupify
         await ctx.send('Failed to connect to surviv.io website.')
         return
     html = soupify(raw, 'html.parser')
     news_wrapper = html.find('div', {'id': 'news-current'})
     paragraphs = [p.text
                   for p in news_wrapper.find_all('p', {'class': 'news-paragraph'})]
     # First paragraph is the post title; the rest form the body.
     title = f'{paragraphs[0]}'
     description = ' \n \n '.join(paragraphs[1:])
     embed = discord.Embed(title=title,
                           description=description,
                           color=0x00b037)
     await ctx.send(embed=embed)
示例#10
0
 async def gun(self, ctx):
     """Reply with a surviv.io gun's stats, or list valid guns on a bad name."""
     args = ctx.message.content.split()
     guild_id = ctx.message.guild.id
     # Per-guild command prefix is stored in servers.db.
     conn = await aiosqlite.connect("servers.db")
     try:
         c = await conn.cursor()
         await c.execute("select prefix from servers where name = ?", [str(guild_id)])
         prefix = (await c.fetchall())[0][0]
     finally:
         # original leaked the connection on every call
         await conn.close()
     msg = f"**Argument #1**: Gun Name \n**Example**: `{prefix}gun scar-h`\n**NOTE**: Typing an incorrect gun will result in a list of valid guns."
     arg_count = len(args) - 1
     if arg_count != self.args:
         await ctx.send(
             f"**{self.name}** command only takes an argument count of **{self.args}**\n{msg}"
         )
         return
     gun_name = args[1]
     # Reset per-invocation state: a previous call may have left this True,
     # which made every later lookup "succeed" with a stale gun name.
     self.in_guns = False
     async with aiohttp.ClientSession() as session:
         async with session.get(self.url) as r:
             if r.status != 200:
                 # was a fire-and-forget ctx.send (missing await) that then
                 # fell through and parsed the failed response anyway
                 await ctx.send(
                     "Bad Web Request to the surviv.io API. Log on issue with the **issue command**."
                 )
                 return
             unparsed = await r.read()
     try:
         rows = (soupify(unparsed, "html.parser").find_all(
             "table", {"class": "article-table"})[1].find_all("tr"))
     except IndexError:
         # wiki layout changed: the second article-table is missing
         await ctx.send(
             "Looks like the program couldn't find what you were looking for. Please **log an issue description** with the **issue command**."
         )
         return
     # Keep the first anchor of each row; rows without one are dropped.
     anchors = (row.find("a") for row in rows)
     guns = {a.text: a["href"] for a in anchors if a is not None}
     # Entries before 'M9' are table headers / non-gun rows; drop them.
     gun_dict = guns.copy()
     for name in guns:
         if name == "M9":
             break
         del gun_dict[name]
     act_gun = None
     for name in gun_dict:
         if gun_name.lower() == name.lower():
             act_gun = name
             self.in_guns = True
     if not self.in_guns:
         big_concat = ", ".join(gun_dict.keys())
         embed = discord.Embed(
             description=
             f'**"{gun_name}"** is not a valid gun in **surviv.io**. \n \n  **Valid Guns**: '
             + big_concat,
             color=0x00B037,
         )
         await ctx.send(embed=embed)
     else:
         async with aiohttp.ClientSession() as session:
             async with session.get(
                     "https://survivio.fandom.com/wiki/" +
                     act_gun) as r:
                 content = await r.read()
         html = soupify(content, "html.parser")
         fire_delay = html.find("div", {"data-source": "fireDelay"}).text
         rel_time = html.find("div", {"data-source": "reloadTime"}).text
         spread = html.find("div", {"data-source": "shotSpread"}).text
         damage = html.find("div", {"data-source": "dmg"}).text
         embed = discord.Embed(
             title=f"{act_gun}'s Stats",
             description=
             f"**Bullet Damage**: {damage} \n **Shot Spread**: {spread} \n **Reload Time**: {rel_time} \n **Firing Delay**: {fire_delay}",
             color=0x00B037,
         )
         await ctx.send(embed=embed)
示例#11
0
def wiki_get(bot, nick, chan, arg, searchkey=None, api_url=None):
    """ wiki <page> -> Get a summary of a wikipedia article. """
    if not arg:
        return bot.msg(chan, get_doc())

    # searchkey remembers the user's original query across recursive retries
    searchkey = searchkey or arg

    # (dead `article_name` computation removed: it was never used)
    term = urlencode(arg.replace(" ", "_"))

    url = (api_url or "https://en.wikipedia.org/w/api.php%s") % ("?action=parse&format=json&prop=text&redirects=&page=")
    url += term

    response = requests.get(url, headers=headers).json()

    if response.get("error", None):
        if response['error']['code'] == "missingtitle":
            # the page we specified does not exist. We'll guess on the basis of search, then.
            return wiki_get(bot, nick, chan, wiki_search(arg, url=api_url)[0], searchkey=arg, api_url=api_url)
        else:
            return bot.msg(chan, "Error: %s" % (response['error']['info']))

    soup = soupify(response['parse']['text']['*'], "lxml")

    # Is the page a disambiguation page? If so, search for the page title and get other pages with similar names
    if soup.find("table", id="disambigbox") is not None:
        return wiki_get(bot, nick, chan, wiki_search(arg, url=api_url)[0], searchkey=arg, api_url=api_url)

    # Get the paragraph and the html url from the soup
    paragraph = get_paragraph(response, soup)
    html_url = get_html_url(response, soup, term)
    # (leftover debug print(html_url) removed)

    # First, highlight all of the bold terms in the page
    for i in soup.find_all("b"):
        i.string = "%s" % (bot.hicolor(i.text))

    # Get the sentences
    sentences = ''.join(bot.state.data["sentence_re"].findall(paragraph.text)[:2])

    # Strip footnote markers like "[1]" (raw string: "\[" is an invalid
    # escape sequence in a plain string literal)
    sentences = re.sub(r"\[\d+\]", "", sentences)

    # Get other search results
    results = wiki_search(searchkey)[1:4]

    # Get a shortlink
    short_link = bot.state.data["shortener"](bot, html_url)

    output = "%s %s" % (sentences, bot.style.color("\u2192%s" % short_link, "blue"))

    bot.msg(chan, output)

    # other possible pages
    bot.msg(chan, bot.style.color("Did you mean: %s" % (' - '.join(results)), "gray"))
示例#12
0
async def scrape_via_bs4(bot: RiptideBot):
    """Routine scrape: refresh surviv.io news, melee and gun stats in surviv.db.

    Waits until the bot is ready, (re)creates the GUNS/MELEE/NEW tables,
    then scrapes surviv.io and its fandom wiki. Any failure is caught at
    the top level, logged, and triggers wrapup_all().
    """

    def _stat(page, source):
        # Text of the wiki infobox div with the given data-source attribute,
        # or "N/A" when the page doesn't have that stat (find() -> None).
        tag = page.find("div", {"data-source": source})
        return tag.text if tag is not None else "N/A"

    await bot.wait_until_ready()
    # Start Surviv Scrape
    try:
        surviv_conn = await aiosqlite.connect('data/rotating/surviv.db')
        surviv_cur = await surviv_conn.cursor()
        await surviv_cur.execute("""CREATE TABLE IF NOT EXISTS GUNS(
        NAME TEXT,
        DAMAGE REAL,
        SPREAD REAL,
        RELOAD REAL,
        DELAY REAL)""")  # create table if not exists for reach thing
        await surviv_conn.commit()
        await surviv_cur.execute("""CREATE TABLE IF NOT EXISTS MELEE(
        NAME TEXT,
        SPEED REAL,
        DAMAGE REAL,
        RADIUS REAL,
        COOLDOWN REAL,
        AUTO REAL)""")
        await surviv_conn.commit()
        await surviv_cur.execute("""CREATE TABLE IF NOT EXISTS NEW(
        TITLE TEXT,
        TIME TEXT,
        CHANGE TEXT)""")
        await surviv_conn.commit()

        # get current surviv.io update
        async with bot._session.get("http://surviv.io/") as r:
            raw = await r.read()
        html = soupify(raw, "html.parser")
        current_news = html.find("div", {"id": "news-current"})
        current_title = current_news.find('strong').text
        current_date = current_news.find('small').text
        body = []
        for p in current_news.findAll('p', {"class": 'news-paragraph'})[1:]:
            inner_html = p.decode_contents()
            inner_html = inner_html.replace(
                '<span class="highlight">',
                '**').replace('</span>',
                              '**')  # make highlighted portions bold
            body.append(inner_html)
        content = "\n\n".join(body)
        # seed the single-row NEW table on first run so UPDATE has a target
        await surviv_cur.execute("""select * from new""")
        rows = await surviv_cur.fetchall()
        if len(rows) == 0:
            await surviv_cur.execute(
                """insert into new values ('placeholder', 'placeholder', 'placeholder')"""
            )
        # put the new update in the table
        await surviv_cur.execute(
            """UPDATE NEW
                                 SET CHANGE = ?,
                                 TITLE = ?,
                                 TIME = ?""",
            [content, current_title, current_date])
        await surviv_conn.commit()

        # get the stats of melee in surviv.io
        await surviv_cur.execute("""delete from melee""")
        await surviv_conn.commit()
        url = "https://survivio.fandom.com/wiki/Melee_Weapons"
        async with bot._session.get(url) as r:
            raw_melee = await r.read()
        html = soupify(raw_melee, 'html.parser')
        weapons_html = html.find('table', {'class': 'article-table'})
        weapons = weapons_html.find_all('tr')
        for w in weapons[1:]:
            wep_name = w.find_all("a")[0].text
            async with bot._session.get("https://survivio.fandom.com/wiki/" +
                                        wep_name) as r:
                unparsed = await r.read()
            # (was parsed twice in a row; one soupify is enough)
            html = soupify(unparsed, 'html.parser')
            await surviv_cur.execute(
                """insert into melee values (?, ?, ?, ?, ?, ?)""",
                [wep_name.lower(),
                 _stat(html, "equipSpeed"),
                 _stat(html, "damage"),
                 _stat(html, "rad"),
                 _stat(html, "cooldownTime"),
                 _stat(html, "autoAttack")])
            await surviv_conn.commit()

        # get the stats of guns in surviv.io
        await surviv_cur.execute("""delete from guns""")
        await surviv_conn.commit()
        url = "https://survivio.fandom.com/wiki/Weapons"
        async with bot._session.get(url) as r:
            unparsed = await r.read()
        gun_list = (soupify(unparsed, "html.parser").find_all(
            "table", {"class": "article-table"})[1].find_all("tr"))[1:]
        gun_list = [(g.find("a")["href"], g.find("a").text) for g in gun_list]
        # rows before the 'M9' entry are headers / non-gun rows; drop them
        for idx, n in enumerate(gun_list):
            if n[1] == 'M9':
                gun_list = gun_list[idx:]
                break
        for href, gun_name in gun_list:
            # hrefs are usually wiki-relative; some are already absolute
            if 'http' in href:
                url = href
            else:
                url = "https://survivio.fandom.com" + href
            async with bot._session.get(url) as r:
                g_unparsed = await r.read()
            html = soupify(g_unparsed, "html.parser")
            await surviv_cur.execute(
                """insert into guns values (?, ?, ?, ?, ?)""",
                [gun_name,
                 _stat(html, "dmg"),
                 _stat(html, "shotSpread"),
                 _stat(html, "reloadTime"),
                 _stat(html, "fireDelay")])
            await surviv_conn.commit()

        # Do Fortnite Scraping

        AssessLogger.log('Finished Routine Scrape.')
        # close connection
        await surviv_conn.close()
    except Exception:
        # top-level boundary: log the full traceback and shut down cleanly
        ErrorLogger.log(
            f'Failed get new data. Raised {traceback.format_exc()}')
        AssessLogger.log(f'Failed get new data.')
        wrapup_all()
示例#13
0
File: weather.py  Project: bqv/sadaharu
 def __init__(self, x):
     """Build a weather snapshot from one WOEID-lookup result row.

     ``x`` is the attrs dict of a lookup-results table row (keys such as
     'data-woeid', 'data-country', 'data-center_lat', ...). Fetches the
     Yahoo forecast RSS feed for that WOEID and fills in wind, atmosphere,
     astronomy and condition attributes; values that fail to parse are
     set to None.
     """
     # Yahoo forecast endpoint; u=f requests imperial units, which the
     # parsing below converts to metric alongside the original values.
     url = "http://weather.yahooapis.com/forecastrss?u=f&w="
     self.woeid = x['data-woeid']
     self.location = ', '.join([x['data-country'],x['data-district_county'],x['data-city'],x['data-province_state']])
     self.latlong = "[%s, %s]" %(x['data-center_lat'],x['data-center_long'])
     self.forecast = []
     soup = soupify(requests.get(url+x['data-woeid']).text, "lxml")
     print(soup)
     # Walk every tag in the feed; the yweather:* namespace tags carry the data.
     for z in soup.findAll():
         if z.name.startswith("yweather"):
             if z.name.endswith("location"):
                 pass
             elif z.name.endswith("units"):
                 # Sanity-check that Yahoo honoured the imperial-units request
                 # before the conversions below assume these units.
                 assert z.attrs['distance'] == "mi" # mi * 1.609344 = km
                 assert z.attrs['pressure'] == "in" # ppsi * 68.9475729 = mB
                 assert z.attrs['speed'] == "mph" # mph * 1.609344 = km/h, mph * 0.44704 = m/s
             elif z.name.endswith("wind"):
                 # Wind chill: convert F -> C and report both.
                 try:
                     c_chill = "%d\xb0C" %(round(((int(z.attrs['chill']) - 32) * 5) / 9))
                     f_chill = "%s\xb0F" %(z.attrs['chill'])
                     self.chill = "%s (%s)" %(c_chill, f_chill)
                 except ValueError:
                     self.chill = None
                 # Bearing in degrees -> nearest 16-point compass name.
                 try:
                     self.direction = ["N", "NNE", "NE", "ENE", "E", "ESE", "SE", "SSE", "S", "SSW", "SW", "WSW", "W", "WNW", "NW", "NNW"][round(int(z.attrs['direction'])/22.5)%16]
                 except ValueError:
                     self.direction = None
                 # Wind speed in mph -> km/h and m/s.
                 try:
                     kmph_speed = "%dkm/h" %(round(int(z.attrs['speed']) * 1.609344))
                     mps_speed = "%dm/s" %(round(int(z.attrs['speed']) * 0.44704))
                     mph_speed = "%smph" %(z.attrs['speed'])
                     self.wspeed = "%s (%s,%s)" %(kmph_speed, mph_speed, mps_speed)
                 except ValueError:
                     self.wspeed = None
             elif z.name.endswith("atmosphere"):
                 try:
                     self.humidity = "%s%%"%(z.attrs['humidity'])
                 except ValueError:
                     self.humidity = None
                 # Pressure in inches (per the assert above) -> millibar.
                 try:
                     mb_pres = "%dmbar"%(int(z.attrs['pressure'])*68.9475729)
                     ppsi_pres = "%spsi"%(z.attrs['pressure'])
                     self.pres = "%s (%s)" %(mb_pres, ppsi_pres)
                 except ValueError:
                     self.pres = None
                 # Barometer trend code: 0 steady, 1 rising, 2 falling.
                 try:
                     self.rising = ["steady", "rising", "falling"][int(z.attrs['rising'])]
                 except ValueError:
                     self.rising = None
                 # NOTE(review): visibility appears to arrive as miles*100
                 # given the /100 and *0.01609344 factors — TODO confirm.
                 try:
                     mi_visibility = "%d miles"%(int(z.attrs['visibility'])/100)
                     km_visibility = "%dkm"%(int(z.attrs['visibility'])*0.01609344)
                     self.visib = "%s (%s)" %(km_visibility, mi_visibility)
                 except ValueError:
                     self.visib = None
             elif z.name.endswith("astronomy"):
                 # Sunrise/sunset are plain strings; empty string means unknown.
                 if z.attrs['sunrise'] != "":
                     self.sunrise = z.attrs['sunrise']
                 else:
                     self.sunrise = None
                 if z.attrs['sunset'] != "":
                     self.sunset = z.attrs['sunset']
                 else:
                     self.sunset = None
             elif z.name.endswith("condition"):
                 # Current temperature: F -> C, report both.
                 try:
                     c_temp = "%d\xb0C" %(round(((int(z.attrs['temp']) - 32) * 5) / 9))
                     f_temp = "%s\xb0F" %(z.attrs['temp'])
                     self.temp = "%s (%s)" %(c_temp, f_temp)
                 except:
                     self.temp = None
                 self.condition = "%s" %(z.attrs['text'])
             elif z.name.endswith("forecast"):
                     self.forecast.append(z.attrs)
示例#14
0
    async def melee(self, ctx):
        """Reply with a surviv.io melee weapon's stats, or list valid weapons."""
        guild_id = ctx.message.guild.id
        # Per-guild command prefix is stored in servers.db.
        conn = await aiosqlite.connect("servers.db")
        try:
            c = await conn.cursor()
            await c.execute("select prefix from servers where name = ?", [str(guild_id)])
            prefix = (await c.fetchall())[0][0]
        finally:
            # original leaked the connection on every call
            await conn.close()
        args = ctx.message.content.split()
        arg_count = len(args) - 1
        msg = f"**Example**: `{prefix}melee katana`"
        if arg_count != self.args:
            await ctx.send(
                f"**{self.name}** command only takes an argument count of **{self.args}** \n{msg}"
            )
            return
        weapon = args[1]
        async with aiohttp.ClientSession() as session:
            async with session.get(self.url) as r:
                unparsed = await r.read()
        wep_finder = soupify(unparsed, "html.parser")
        weapons_html = wep_finder.find("table", {"class": "article-table"})
        # name -> href of every weapon row that has at least one anchor
        wep_dict = {}
        for row in weapons_html.find_all("tr"):
            anchors = row.find_all("a")
            if anchors:
                wep_dict[anchors[0].text] = anchors[1]["href"]
        # case-insensitive lookup of the requested weapon
        act_weapon = None
        for name in wep_dict:
            if name.lower() == weapon.lower():
                act_weapon = name
        if act_weapon is not None:
            async with aiohttp.ClientSession() as session:
                async with session.get(
                        "https://survivio.fandom.com/wiki/" +
                        act_weapon) as r:
                    unparsed = await r.read()
            html = soupify(unparsed, "html.parser")
            equip_speed = html.find("div", {"data-source": "equipSpeed"}).text
            damage = html.find("div", {"data-source": "damage"}).text
            rad = html.find("div", {"data-source": "rad"}).text
            cltime = html.find("div", {"data-source": "cooldownTime"}).text
            auto = html.find("div", {"data-source": "autoAttack"}).text
            # (was built twice via `embed = embed = ...`; only the second
            # embed was ever sent, so keep that one)
            embed = discord.Embed(
                title=f"{act_weapon} Stats",
                description=
                f"**Damage**: {damage} \n **Attack Radius**: {rad} \n **Equip Speed**: {equip_speed} \n **Cooldown Time**: {cltime} \n **Auto Attack**: {auto} \n",
                color=0x00B037,
            )
            await ctx.send(embed=embed)
        else:
            # Build the embed once, after joining all names. The original
            # constructed it inside the loop's else branch, so with zero or
            # one known weapon `embed` was unbound and ctx.send raised.
            big_concat = ", ".join(wep_dict.keys())
            embed = discord.Embed(
                description=
                f'**"{weapon}"** is not a valid weapon in **surviv.io**. \n \n  **Valid Weapons**: '
                + big_concat,
                color=0x00B037,
            )
            await ctx.send(embed=embed)