Example no. 1
0
    def excuse(self):
        """Fetch a random developer excuse from developerexcuses.com.

        Returns the excuse text, or a canned fallback message when the
        site cannot be reached.
        """
        try:
            html = Browser('http://developerexcuses.com')
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C / SystemExit still
            # propagate; any network/HTTP failure degrades gracefully.
            return 'You\'re on your own this time bud'

        parsed = html.soup()
        # The excuse is the text of the first anchor element on the page.
        return parsed.a.text
Example no. 2
0
    def excuse(self):
        """Fetch a random developer excuse from developerexcuses.com.

        Returns the excuse text, or a canned fallback message when the
        site cannot be reached.
        """
        try:
            html = Browser('http://developerexcuses.com')
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C / SystemExit still
            # propagate; any network/HTTP failure degrades gracefully.
            return 'You\'re on your own this time bud'

        parsed = html.soup()
        # The excuse is the text of the first anchor element on the page.
        return parsed.a.text
Example no. 3
0
    def fml(self):
        """Return a random FML entry scraped from fmylife.com.

        If ``self.values`` is non-empty, its words form a search query;
        otherwise a random entry is fetched.  Returns a fallback message
        on any network or scraping failure.
        """
        url = 'http://www.fmylife.com'

        if self.values:
            url += '/search/' + '%20'.join(self.values)
        else:
            url += '/random'

        try:
            request = Browser(url)
            soup = request.soup()
            # Entries live inside the infinite-scroll container and all
            # begin with the word "Today"; pick one at random.
            fml = choice(soup.find('div', {'class': 'infinite-scroll'}).find_all(string=re.compile('Today'))).strip()
            return fml
        except Exception:
            # Network failure, layout change, or no matches: best-effort
            # reply.  (Dropped the unused `as e` binding.)
            return "Nobody's life got f****d like that"
Example no. 4
0
    def ety(self):
        """Look up a word's etymology on etymonline.com.

        ``self.values[0]`` is the word to look up; an optional
        ``self.values[1]`` is a 1-based index selecting which etymology
        entry to display (out of range or malformed values fall back to
        the first entry).
        """
        if not self.values:
            return "Enter a word"

        word = self.values[0]
        params = {
            'allowed_in_frame': '0',
            'searchmode': 'term',
            'search': word
        }

        request = Browser("http://www.etymonline.com/index.php", params)
        if not request:
            return 'Error'

        cont = request.soup()

        # Each result is a <dt> headword paired with a <dd> definition.
        heads = cont.findAll("dt")
        defs = cont.findAll("dd")

        if not defs:
            return "Couldn't find anything"

        # Optional 1-based entry index.  Renamed from `ord`, which
        # shadowed the builtin; except narrowed from bare `except:` to
        # the two failures int(self.values[1]) can actually raise.
        try:
            idx = int(self.values[1])
        except (IndexError, ValueError):
            idx = 1

        if idx > len(defs):
            idx = 1

        # Convert to a 0-based index, clamping negative input to 0.
        idx -= 1
        if idx < 0:
            idx = 0

        try:
            _word = ''.join(heads[idx].findAll(text=True))
            _def = ''.join(defs[idx].findAll(text=True))
        except Exception as e:
            # Page structure changed (e.g. fewer <dt> than <dd>): report
            # instead of crashing the bot.
            self.chat('Failed to parse.', error=e)
            return

        return "Etymology %s of %s for %s: %s" % (str(idx + 1), str(
            len(defs)), _word, _def)
Example no. 5
0
    def ety(self):
        """Look up a word's etymology on etymonline.com.

        ``self.values[0]`` is the word to look up; an optional
        ``self.values[1]`` is a 1-based index selecting which etymology
        entry to display (out of range or malformed values fall back to
        the first entry).
        """
        if not self.values:
            return "Enter a word"

        word = self.values[0]
        params = {'allowed_in_frame': '0', 'searchmode': 'term', 'search': word}

        request = Browser("http://www.etymonline.com/index.php", params)
        if not request:
            return 'Error'

        cont = request.soup()

        # Each result is a <dt> headword paired with a <dd> definition.
        heads = cont.findAll("dt")
        defs = cont.findAll("dd")

        if not defs:
            return "Couldn't find anything"

        # Optional 1-based entry index.  Renamed from `ord`, which
        # shadowed the builtin; except narrowed from bare `except:` to
        # the two failures int(self.values[1]) can actually raise.
        try:
            idx = int(self.values[1])
        except (IndexError, ValueError):
            idx = 1

        if idx > len(defs):
            idx = 1

        # Convert to a 0-based index, clamping negative input to 0.
        idx -= 1
        if idx < 0:
            idx = 0

        try:
            _word = ''.join(heads[idx].findAll(text=True))
            _def = ''.join(defs[idx].findAll(text=True))
        except Exception as e:
            # Page structure changed (e.g. fewer <dt> than <dd>): report
            # instead of crashing the bot.
            self.chat('Failed to parse.', error=e)
            return

        return "Etymology %s of %s for %s: %s" % (str(idx + 1), str(len(defs)), _word, _def)
Example no. 6
0
    def ud(self):
        """Look up ``self.values`` on Urban Dictionary.

        Returns the first definition found, with HTML entities unescaped
        and each paragraph soft-wrapped, or a fallback message on any
        failure.
        """
        if not self.values:
            return "Whatchu wanna know, bitch?"

        term = ' '.join(self.values).strip()

        if term == 'truffle butter':
            return "You all know what it is, and I don't want to have to read this shit again."

        try:
            request = Browser('http://www.urbandictionary.com/define.php',
                              params={'term': term})
            soup = request.soup()
        except Exception:
            # Narrowed from bare `except:`; network/parse failures keep
            # the original best-effort reply.
            return "parse error"

        elem = soup.find('div', {'class': 'meaning'})
        if elem is None:
            # Explicit guard replaces swallowing the AttributeError that
            # a missing definition block used to raise.
            return "couldn't find anything"

        defn = list(elem.stripped_strings)
        if not defn:
            return "couldn't find anything"

        # Unfortunately, BeautifulSoup doesn't parse hexadecimal HTML
        # entities like &#x27; so use the parser for any stray entities.
        response = []
        for paragraph in defn:
            wrapped = textwrap.wrap(paragraph, 200)
            response.append(unescape(' '.join(wrapped)))

        return ' '.join(response)
Example no. 7
0
    def fml(self):
        """Return an FML entry via the fmylife.com API.

        With ``self.values`` present, performs a search and returns a
        random match; otherwise returns the first random entry.  Any
        network or parse failure yields a fallback message.
        """
        url = 'http://api.fmylife.com'
        params = {'language': 'en', 'key': self.secrets.fml_api}

        if self.values:
            url += '/view/search'
            params['search'] = "+".join(self.values)
        else:
            url += '/view/random'

        try:
            request = Browser(url, params)
            soup = request.soup()

            # The API wraps each entry's body in a <text> element.
            if self.values:
                fml = choice(soup.find_all("text")).get_text()
            else:
                fml = soup.find_all("text")[0].get_text()
            return fml
        except Exception:
            # Best-effort reply on any failure.  (Dropped the unused
            # `as e` binding.)
            return "Nobody's life got f****d like that"
Example no. 8
0
    def ud(self):
        """Look up ``self.values`` on Urban Dictionary.

        Returns the first definition found, with HTML entities unescaped
        and each paragraph soft-wrapped, or a fallback message on any
        failure.
        """
        if not self.values:
            return "Whatchu wanna know, bitch?"

        term = ' '.join(self.values).strip()

        if term == 'truffle butter':
            return "You all know what it is, and I don't want to have to read this shit again."

        try:
            request = Browser('http://www.urbandictionary.com/define.php',
                              params={'term': term})
            soup = request.soup()
        except Exception:
            # Narrowed from bare `except:`; network/parse failures keep
            # the original best-effort reply.
            return "parse error"

        elem = soup.find('div', {'class': 'meaning'})
        if elem is None:
            # Explicit guard replaces swallowing the AttributeError that
            # a missing definition block used to raise.
            return "couldn't find anything"

        defn = list(elem.stripped_strings)
        if not defn:
            return "couldn't find anything"

        # Unfortunately, BeautifulSoup doesn't parse hexadecimal HTML
        # entities like &#x27; so use the parser for any stray entities.
        response = []
        for paragraph in defn:
            wrapped = textwrap.wrap(paragraph, 200)
            response.append(unescape(' '.join(wrapped)))

        return ' '.join(response)