Пример #1
0
def rule34(phenny, input):
    """.rule34 <query> - Rule 34: If it exists there is p**n of it."""

    # Respect the bot's per-user ignore list.
    if input.nick in phenny.config.user_ignore:
        return

    # Bail out unless NSFW content is allowed in this channel.
    if check_nsfw(phenny, input.sender, None, input.nick):
        return
    q = input.group(2)
    if not q:
        # No query given: show usage (the docstring) instead.
        phenny.say(rule34.__doc__.strip())
        return

    try:
        # Query the site's XML API (dapi) rather than scraping HTML.
        req = web.get(
            'http://rule34.xxx/index.php?page=dapi&s=post&q=index&tags={0}'.
            format(urlquote(q)))  # Let's use XML!
    except (HTTPError, IOError):
        raise GrumbleError(
            "THE INTERNET IS F*****G BROKEN. Please try again later.")

    results = ET.fromstring(req)

    # An empty XML root means no posts matched the tags.
    if len(results) <= 0:
        phenny.reply("Huh. rule34.xxx is missing {0}".format(q))
        return

    try:
        # Pick a random post element and pull its direct file URL.
        link = (choice(results).attrib['file_url'])
    except AttributeError:
        raise GrumbleError("THE INTERNET IS BROKEN. Please try again later.")

    response = '!!NSFW!! -> {0} <- !!NSFW!!'.format(link)
    phenny.reply(response)
Пример #2
0
def apertium_translate(phenny, input):
    '''Translates a phrase using APy.'''
    # A pair looks like "spa-eng"; a block may chain pairs with '|'.
    pair_pattern = langRE + r'-' + langRE
    match = strict_check(
        r'((?:' + pair_pattern + r'(?:\|' + pair_pattern + r')*' + r' ?)+)\s+(.*)',
        input.group(1), apertium_translate)
    text = match.group(2)
    if len(text) > 350 and not input.admin:
        raise GrumbleError('Phrase must be under 350 characters.')

    # Each space-separated block is translated independently from the
    # original text; pairs inside a block chain output into input.
    for block in match.group(1).split(' '):
        translated = text
        for input_lang, output_lang in [p.split('-') for p in block.split('|')]:
            if input_lang == output_lang:
                raise GrumbleError(
                    'Stop trying to confuse me! Pick different languages ;)')
            try:
                translated = web.decode(
                    translate(phenny, translated, input_lang, output_lang))
            except GrumbleError as err:
                # Report which pair failed and stop processing.
                phenny.say('{:s}-{:s}: {:s}'.format(input_lang, output_lang,
                                                    str(err)))
                return
        phenny.reply(web.decode(translated))
Пример #3
0
def strict_check(pattern, string, function):
    """Fully match *string* against *pattern* or raise a usage error.

    Returns the match object on success; raises GrumbleError carrying the
    command's usage example when the input is empty or does not match.
    """
    if not string:
        raise GrumbleError('Usage: ' + function.example)
    match = re.fullmatch(pattern, string)
    if match is None:
        raise GrumbleError('Usage: ' + function.example)
    return match
Пример #4
0
def apertium_listlangs(phenny, input):
    """Lists languages available for translation from/to"""

    opener = urllib.request.build_opener()
    opener.addheaders = headers

    response = opener.open(APIurl + '/listPairs').read()

    langs = json.loads(response.decode('utf-8'))
    if int(langs['responseStatus']) != 200:
        raise GrumbleError(APIerrorHttp %
                           (langs['responseStatus'], langs['responseDetails']))
    if langs['responseData'] == []:
        raise GrumbleError(APIerrorData)

    # Collect every language seen on either side of a pair,
    # keeping first-seen order and skipping duplicates.
    outlangs = []
    for pair in langs['responseData']:
        for lg in (pair['sourceLanguage'], pair['targetLanguage']):
            if lg not in outlangs:
                outlangs.append(lg)

    extra = "; more info: .listpairs lg"

    # str.join replaces the manual first-flag/accumulator loop.
    phenny.say(", ".join(outlangs) + extra)
Пример #5
0
def apertium_perword(phenny, input):
    '''Perform APy's tagger, morph, translate, and biltrans functions on individual words.'''
    # NOTE(review): this strict_check variant returns (match, error) —
    # differs from the single-return variant elsewhere; confirm against import.
    cmd, cmd_error = strict_check(r'(' + langRE + r')\s+\((.*)\)\s+(.*)',
                                  input.group(1), apertium_perword)
    valid_funcs = {'tagger', 'disambig', 'biltrans', 'translate', 'morph'}

    if cmd_error:
        # (removed unreachable `return` that followed this raise)
        raise GrumbleError(cmd_error)

    # Validate requested functions: every one must come from valid_funcs.
    funcs = cmd.group(2).split(' ')
    if not set(funcs) <= valid_funcs:
        raise GrumbleError(
            'The requested functions must be from the set {:s}.'.format(
                str(valid_funcs)))

    opener = urllib.request.build_opener()
    opener.addheaders = headers

    try:
        response = opener.open(
            '{:s}/perWord?lang={:s}&modes={:s}&q={:s}'.format(
                phenny.config.APy_url, web.quote(cmd.group(1)),
                '+'.join(funcs), web.quote(cmd.group(3)))).read()
    except urllib.error.HTTPError as error:
        handle_error(error)

    # One result object per input word; print each requested mode's output.
    jsdata = json.loads(response.decode('utf-8'))
    for word in jsdata:
        phenny.say(word['input'] + ':')
        for func in funcs:
            phenny.say('  {:9s}: {:s}'.format(func, ' '.join(word[func])))
Пример #6
0
def rule34(phenny, input):
    """.rule34 <query> - Rule 34: If it exists there is p**n of it."""

    query = input.group(2)
    if not query:
        # No query: reply with usage (the docstring).
        phenny.say(rule34.__doc__.strip())
        return

    try:
        page = web.get(
            "http://rule34.xxx/index.php?page=post&s=list&tags={0}".format(
                urlquote(query)))
    except (HTTPError, IOError):
        raise GrumbleError(
            "THE INTERNET IS F*****G BROKEN. Please try again later.")

    # Parse the listing page and make thumbnail links absolute.
    doc = lxml.html.fromstring(page)
    doc.make_links_absolute('http://rule34.xxx/')
    thumbs = doc.find_class('thumb')
    if not thumbs:
        phenny.reply("You just broke Rule 34! Better start uploading...")
        return

    try:
        # First thumbnail's anchor points at the post page.
        link = thumbs[0].find('a').attrib['href']
    except AttributeError:
        raise GrumbleError(
            "THE INTERNET IS F*****G BROKEN. Please try again later.")

    phenny.reply('!!NSFW!! -> {0} <- !!NSFW!!'.format(link))
Пример #7
0
def handle_error(error):
    """Turn an APy HTTP error response into a GrumbleError.

    Prefers the server's 'explanation' field, falling back to 'message'.
    """
    payload = json.loads(error.read().decode('utf-8'))
    detail = payload['explanation'] if 'explanation' in payload else payload['message']
    raise GrumbleError('Error {:d}: {:s}'.format(payload['code'], detail))
Пример #8
0
def apertium_listpairs(phenny, input):
    """Lists translation pairs available to apertium translation"""
    lang = input.group(2)

    opener = urllib.request.build_opener()
    opener.addheaders = headers

    response = opener.open(APIurl + '/listPairs').read()

    # (removed duplicated json.loads of the same response)
    langs = json.loads(response.decode('utf-8'))
    if int(langs['responseStatus']) != 200:
        raise GrumbleError(APIerrorHttp %
                           (langs['responseStatus'], langs['responseDetails']))
    # BUG FIX: was `langs['responseData'] is []`, which is always False
    # (identity vs equality); check emptiness properly.
    if not langs['responseData']:
        raise GrumbleError(APIerrorData)

    if not lang:
        # No language given: list every pair as "src→tgt".
        phenny.say(",".join("%s→%s" % (p['sourceLanguage'], p['targetLanguage'])
                            for p in langs['responseData']))
    else:
        # Languages translating *into* lang, and those reachable *from* it.
        toLang = [p['sourceLanguage'] for p in langs['responseData']
                  if p['targetLanguage'] == lang]
        fromLang = [p['targetLanguage'] for p in langs['responseData']
                    if p['sourceLanguage'] == lang]
        finals = ", ".join(toLang) + (" → %s → " % lang) + ", ".join(fromLang)
        phenny.say(finals)
Пример #9
0
def urbandict(phenny, input):
    """.urb <word> - Search Urban Dictionary for a definition."""

    word = input.group(2)
    if not word:
        # No term given: show usage (the docstring).
        phenny.say(urbandict.__doc__.strip())
        return

    try:
        data = web.get(
            "http://api.urbandictionary.com/v0/define?term={0}".format(
                web.quote(word)))
        data = json.loads(data)
    except Exception:  # was a bare except: — also swallowed SystemExit et al.
        raise GrumbleError(
            "Urban Dictionary slemped out on me. Try again in a minute.")

    results = data['list']

    if not results:
        phenny.say("No results found for {0}".format(word))
        return

    result = results[0]
    url = 'http://www.urbandictionary.com/define.php?term={0}'.format(
        web.quote(word))

    # Trim very long definitions so the IRC line isn't truncated mid-word.
    response = "{0} - {1}".format(result['definition'].strip()[:256], url)
    phenny.say(response)
Пример #10
0
def urbandict(phenny, input):
    """.urb <word> - Search Urban Dictionary for a definition."""

    word = input.group(2)
    if not word:
        # No term given: show usage (the docstring).
        phenny.say(urbandict.__doc__.strip())
        return

    # (removed dead commented-out opener setup)
    try:
        data = web.get(
            "http://api.urbandictionary.com/v0/define?term={0}".format(
                web.quote(word)))
        data = json.loads(data)
    except Exception:  # was a bare except: — also swallowed SystemExit et al.
        raise GrumbleError(
            "Urban Dictionary slemped out on me. Try again in a minute.")

    if data['result_type'] == 'no_results':
        phenny.say("No results found for {0}".format(word))
        return

    result = data['list'][0]
    url = 'http://www.urbandictionary.com/define.php?term={0}'.format(
        web.quote(word))

    # Trim very long definitions so the IRC line isn't truncated mid-word.
    response = "{0} - {1}".format(result['definition'].strip()[:256], url)
    phenny.say(response)
Пример #11
0
def randomreddit(phenny, input):
    """.random <subreddit> - get a random link from the subreddit's frontpage."""

    subreddit = input.group(2)
    if not subreddit:
        phenny.say(".random <subreddit> - get a random link from the subreddit's frontpage")
        return

    # Only allow characters legal in subreddit names; blocks URL injection too.
    if not re.match('^[A-Za-z0-9_-]*$', subreddit):
        phenny.say(input.nick + ": bad subreddit format.")
        return

    url = "http://www.reddit.com/r/" + subreddit + "/.json"
    # Reddit flakes out occasionally: retry up to three times
    # (replaces the original triple-nested bare try/except pyramid).
    resp = None
    for _attempt in range(3):
        try:
            resp = web.get(url)
            break
        except Exception:
            continue
    if resp is None:
        raise GrumbleError('Reddit or subreddit unreachable.')

    reddit = json.loads(resp)
    post = choice(reddit['data']['children'])

    if post['data']['over_18']:
        phenny.reply("!!NSFW!! " + post['data']['url'] + " (" + post['data']['title'] + ") !!NSFW!!")
    else:
        phenny.reply(post['data']['url'] + " (" + post['data']['title'] + ")")
Пример #12
0
def apertium_generate(phenny, input):
    '''Use Apertium APy's generate functionality'''
    # NOTE(review): this strict_check variant returns (match, error) —
    # differs from the single-return variant elsewhere; confirm against import.
    cmd, cmd_error = strict_check(r'(' + langRE + r')\s+(.*)', input.group(1),
                                  apertium_generate)

    if cmd_error:
        # (removed unreachable `return` that followed this raise)
        raise GrumbleError(cmd_error)

    opener = urllib.request.build_opener()
    opener.addheaders = headers

    try:
        response = opener.open('{:s}/generate?lang={:s}&q={:s}'.format(
            phenny.config.APy_analyseURL, web.quote(cmd.group(1)),
            web.quote(cmd.group(2).strip()))).read()
    except urllib.error.HTTPError as error:
        handle_error(error)

    # APy returns [generated, original] pairs; render as "original → generated".
    jobj = json.loads(response.decode('utf-8'))
    messages = []

    for generation, original in jobj:
        messages.append(original + '  →  ' + generation)

    more.add_messages(phenny, input.nick, messages)
Пример #13
0
    def test_translate_non_langs(self, mock_handle, mock_open):
        """Translation requests involving unknown languages surface APy errors."""
        mock_handle.side_effect = GrumbleError('some message')

        # non-existent language: APy answers HTTP 400, the error handler runs,
        # and its message is relayed to the channel as "pair: message"
        self.input.group.return_value = 'spa-zzz ' + self.texts['spa']
        mock_open.side_effect = HTTPError('url', 400, 'msg', 'hdrs', None)
        apy.apertium_translate(self.phenny, self.input)
        self.assertTrue(mock_handle.called)
        self.phenny.say.assert_called_once_with('spa-zzz: some message')
        self.reset_mocks(self.phenny, mock_open, mock_handle)

        # bad input: missing/incomplete pair specs and self-translation raise
        self.check_exceptions([self.texts['spa'], 'spa ' + self.texts['spa'], 'spa-eng'],
                              apy.apertium_translate)
        self.check_exceptions(['en-en Translate to the same language?'],
                              apy.apertium_translate, 'self-translation')
        self.reset_mocks(self.phenny, mock_open, mock_handle)

        # non-existent language alongside a real one: the good pair still
        # translates (reply) while the bad pair reports its error (say);
        # both queries must have been issued, in order
        self.input.group.return_value = 'spa-eng spa-zzz ' + self.texts['spa']
        mock_open.side_effect = [mock.MagicMock(read=lambda: self.fake_json('eng')),
                                 HTTPError('url', 400, 'msg', 'hdrs', None)]
        apy.apertium_translate(self.phenny, self.input)
        self.assertEqual(mock_open.call_args_list, [mock.call(self.format_query('spa', 'eng')),
                                                    mock.call(self.format_query('spa', 'zzz'))])
        self.assertTrue(mock_handle.called)
        self.phenny.reply.assert_called_once_with(self.texts['eng'])
        self.phenny.say.assert_called_once_with('spa-zzz: some message')
Пример #14
0
def f_weather(phenny, input):
    """.weather <ICAO> - Show the weather at airport with the code <ICAO>."""
    icao_code = input.group(2)
    if not icao_code:
        return phenny.say("Try .weather London, for example?")

    # Resolve a place/airport name into an ICAO station code.
    icao_code = code(phenny, icao_code)

    if not icao_code:
        phenny.say("No ICAO code found, sorry")
        return

    uri = 'http://tgftp.nws.noaa.gov/data/observations/metar/stations/%s.TXT'
    try:
        # renamed from `bytes`, which shadowed the builtin
        report = web.get(uri % icao_code)
    except AttributeError:
        raise GrumbleError('OH CRAP NOAA HAS GONE DOWN THE WEB IS BROKEN')
    except web.HTTPError:
        phenny.say("No NOAA data available for that location.")
        return

    # NOAA serves a "Not Found" page body for unknown stations.
    if 'Not Found' in report:
        phenny.say(icao_code + ": no such ICAO code, or no NOAA data")
        return

    phenny.say(str(metar.parse(report)))
Пример #15
0
def randquote_fetcher(phenny, topic, to_user):
    """Fetch a random quote (optionally filtered by *topic*) and queue it to *to_user*."""
    # create opener with a Referer the quote site expects
    opener = urllib.request.build_opener()
    opener.addheaders = [
        ('User-agent', web.Grab().version),
        ('Referer', "http://quotes.firespeaker.org/"),
    ]

    try:
        url = "http://quotes.firespeaker.org/random.php"
        if topic:
            url += "?topic=%s" % web.quote(topic)
        req = opener.open(url)
        data = req.read().decode('utf-8')
        data = json.loads(data)
    except (HTTPError, IOError, ValueError) as e:
        raise GrumbleError("Firespeaker.org down? Try again later.") from e

    if len(data) == 0:
        phenny.say("No results found")
        return

    # (removed dead commented-out urbandict leftovers)
    if data['quote'] is not None:  # was `!= None`
        # Strip the light HTML markup the API embeds in quotes.
        quote = data['quote'].replace('</p>', '').replace('<p>', '').replace('<em>', '_').replace('</em>', '_').replace('&mdash;', '—')
        response = data['short_url'] + ' - ' + quote
    else:
        phenny.say("Sorry, no quotes returned!")
        return

    # Multi-line quotes are queued line by line via the `more` helper.
    more.add_messages(phenny, to_user, response.split('\n'))
Пример #16
0
def search(query):
    """Query the directory service; return a list of entry dicts, or False if empty."""
    query = web.quote(query)
    try:
        raw = web.get(SEARCH_URL.format(query), verify=False)
    except (web.ConnectionError, web.HTTPError):
        raise GrumbleError(
            "THE INTERNET IS F*****G BROKEN. Please try again later.")

    # apparently the failure mode if you search for <3 characters is a blank
    # XML page...
    if not raw:
        return False

    xml = lxml.etree.fromstring(raw.encode('utf-8'))
    entries = xml.findall('{0}directory-entries/{0}entry'.format(NS))
    if not entries:
        return False

    # Each entry becomes a dict of its attr name -> first child's text.
    return [
        {attr.attrib['name']: attr[0].text
         for attr in entry.findall('{0}attr'.format(NS))}
        for entry in entries
    ]
Пример #17
0
def lines(phenny, input):
    """.lines <nickname> (<today/yesterday/YYYYMMDD>) - Returns the number of lines a user posted on a specific date."""

    # Defaults: the caller's nick and today's log.
    nickname = input.nick
    date = "today"

    if input.group(2):
        args = input.group(2).split(" ")
        if len(args) == 1:
            nickname = args[0]
        elif len(args) == 2:
            nickname, date = args
        else:
            # Too many words: show usage and stop.
            phenny.reply(
                ".lines <nickname> (<today/yesterday/YYYYMMDD>) - Returns the number of lines a user posted on a specific date."
            )
            return

    try:
        req = web.post("http://linx.li/vtluuglines", {
            'nickname': nickname,
            'date': date,
            'sender': input.nick
        })
    except (HTTPError, IOError):
        raise GrumbleError(
            "THE INTERNET IS F*****G BROKEN. Please try again later.")

    phenny.reply(req)
Пример #18
0
def foodvote(phenny, input):
    """.foodvote <place> <time> - Record a vote for where/when to eat."""
    if not input.group(2) or not input.group(3):
        return phenny.reply("You need to specify a place and time, as in "\
                ".foodvote hokie haus 18:45")

    # Votes are grouped by key: an explicit key argument or the channel name.
    key = input.group(4) or input.sender
    postdata = {
        'user': input.nick,
        'restaurant': input.group(2),
        'start': input.group(3),
        'key': key.strip(),
    }
    # Sign the payload so the service can authenticate the bot.
    postdata['sig'] = _sign_vote(phenny.config.foodforus_api_key, postdata)

    try:
        req = web.post(API_URL + '/vote', postdata)
        data = json.loads(req)
    except Exception:  # was a bare except: — also swallowed SystemExit et al.
        raise GrumbleError("Uh oh, I couldn't contact foodforus. HOW WILL WE "\
                "EAT NOW‽")

    if 'error' in data:
        phenny.reply(data['error'])
    else:
        phenny.reply("Your vote has been recorded.")
Пример #19
0
def translate(phenny, translate_me, input_lang, output_lang='en'):
    """Translate *translate_me* from *input_lang* to *output_lang* via APy.

    Returns the translated text; raises GrumbleError on an empty result.
    """
    opener = urllib.request.build_opener()
    opener.addheaders = headers

    input_lang, output_lang = web.quote(input_lang), web.quote(output_lang)
    translate_me = web.quote(translate_me)

    # Fall back to the public APy instance when the bot has no APy_url
    # configured; was a bare except — only the attribute lookup can fail here.
    try:
        apy_url = phenny.config.APy_url
    except AttributeError:
        apy_url = 'http://apy.projectjj.com'

    try:
        response = opener.open(
            '{:s}/translate?q={:s}&langpair={:s}|{:s}'.format(
                apy_url, translate_me, input_lang, output_lang)).read()
    except urllib.error.HTTPError as error:
        handle_error(error)

    responseArray = json.loads(response.decode('utf-8'))

    if responseArray['responseData']['translatedText'] == []:
        raise GrumbleError(Apy_errorData)

    translated_text = responseArray['responseData']['translatedText']
    return translated_text
Пример #20
0
def apertium_translate(phenny, input):
    """Translates a phrase using the apertium API.

    Syntax: "<text> <src-dst>" optionally followed by "|dst-dst2|..." —
    each later pair chains on the previous pair's output.
    """
    line = input.group(2)
    if not line:
        raise GrumbleError("Need something to translate!")

    # Extra pairs after '|' chain translations; the first segment holds
    # the text plus the initial pair.
    pairs = []
    guidelines = line.split('|')
    for guideline in guidelines[1:]:
        pairs.append(guideline.strip().split('-'))
    guidelines = guidelines[0]
    # NOTE(review): raises AttributeError when no "<text> xx-yy" match is
    # found (original behavior) — TODO confirm that is intended.
    stuff = re.search('(.*) ([a-z]+-[a-z]+)', guidelines)
    pairs.insert(0, stuff.group(2).split('-'))
    translate_me = stuff.group(1)

    if (len(translate_me) > 350) and (not input.admin):
        raise GrumbleError('Phrase must be under 350 characters.')

    # (removed unused `finalmsg`/`this_translated` locals and dead comments)
    msg = translate_me
    translated = ""
    for (input_lang, output_lang) in pairs:
        if input_lang == output_lang:
            raise GrumbleError(
                'Stop trying to confuse me!  Pick different languages ;)')
        msg = translate(msg, input_lang, output_lang)
        if not msg:
            raise GrumbleError('The %s to %s translation failed, sorry!' %
                               (input_lang, output_lang))
        msg = web.decode(msg)  # msg.replace('&#39;', "'")
        translated = msg

    phenny.reply(translated)
Пример #21
0
def translate(translate_me, input_lang, output_lang='en'):
    """Translate text via the Apertium web API; returns the translated string."""
    opener = urllib.request.build_opener()
    opener.addheaders = headers

    input_lang, output_lang = web.quote(input_lang), web.quote(output_lang)
    translate_me = web.quote(translate_me)

    url = (APIurl + '/translate?q=' + translate_me + '&langpair=' +
           input_lang + "|" + output_lang)
    raw = opener.open(url).read()

    result = json.loads(raw.decode('utf-8'))
    # Non-200 service status carries its own error details.
    if int(result['responseStatus']) != 200:
        raise GrumbleError(APIerrorHttp % (result['responseStatus'], result['responseDetails']))
    # An empty list means the API produced no translation.
    if result['responseData']['translatedText'] == []:
        raise GrumbleError(APIerrorData)

    return result['responseData']['translatedText']
Пример #22
0
def translate(translate_me, input_lang, output_lang='en'):
    """Translate text through api.apertium.org's JSON endpoint."""
    input_lang, output_lang = web.quote(input_lang), web.quote(output_lang)
    translate_me = web.quote(translate_me)

    response = get_page(
        'api.apertium.org', '/json/translate?q=%s&langpair=%s|%s' %
        (translate_me, input_lang, output_lang))

    payload = json.loads(response)
    # Non-200 service status carries its own error details.
    if int(payload['responseStatus']) != 200:
        raise GrumbleError(APIerrorHttp % (payload['responseStatus'],
                                           payload['responseDetails']))
    text = payload['responseData']['translatedText']
    # An empty list means the API produced no translation.
    if text == []:
        raise GrumbleError(APIerrorData)

    return text
Пример #23
0
def commit(phenny, input):
    """.commit - Get a What the Commit commit message."""

    try:
        msg = web.get("http://whatthecommit.com/index.txt")
    except Exception:  # was a bare except: — also swallowed SystemExit et al.
        raise GrumbleError(
            "THE INTERNET IS F*****G BROKEN. Please try again later.")

    phenny.reply(msg)
Пример #24
0
def mlih(phenny, input):
    """.mlih - My life is ho."""
    try:
        page = web.get("http://mylifeisho.com/random")
    except (HTTPError, IOError):
        raise GrumbleError("MLIH is giving some dome to some lax bros.")

    # First paragraph of the first story on the random page.
    story = lxml.html.fromstring(page)
    phenny.say(story.find_class('storycontent')[0][0].text_content())
Пример #25
0
def mlihp(phenny, input):
    """.mlihp - My life is Harry Potter."""
    try:
        page = web.get("http://www.mylifeishp.com/random")
    except (HTTPError, IOError):
        raise GrumbleError("This service is not available to Muggles.")

    # First element styled 'oldlink' holds the random quote.
    tree = lxml.html.fromstring(page)
    phenny.say(tree.find_class('oldlink')[0].text_content())
Пример #26
0
def mlig(phenny, input):
    """.mlig - My life is ginger."""
    try:
        page = web.get("http://www.mylifeisginger.org/random")
    except (HTTPError, IOError):
        raise GrumbleError("Busy eating your soul. Be back soon.")

    # First element styled 'oldlink' holds the random quote.
    tree = lxml.html.fromstring(page)
    phenny.say(tree.find_class('oldlink')[0].text_content())
Пример #27
0
def mlib(phenny, input):
    """.mlib - My life is bro."""
    try:
        page = web.get("http://mylifeisbro.com/random")
    except (HTTPError, IOError):
        raise GrumbleError("MLIB is out getting a case of Natty. It's chill.")

    # First paragraph of the first story on the random page.
    story = lxml.html.fromstring(page)
    phenny.say(story.find_class('storycontent')[0][0].text_content())
Пример #28
0
def fml(phenny, input):
    """.fml"""
    try:
        page = web.get("http://www.fmylife.com/random")
    except (HTTPError, IOError):
        raise GrumbleError("I tried to use .fml, but it was broken. FML")

    # First child of the first 'article' element is the random FML entry.
    tree = lxml.html.fromstring(page)
    phenny.say(tree.find_class('article')[0][0].text_content())
Пример #29
0
def mlit(phenny, input):
    """.mlit - My life is Twilight."""
    try:
        page = web.get("http://mylifeistwilight.com/random")
    except (HTTPError, IOError):
        raise GrumbleError("Error: Your life is too Twilight. Go outside.")

    # First element styled 'fmllink' holds the random quote.
    tree = lxml.html.fromstring(page)
    phenny.say(tree.find_class('fmllink')[0].text_content())
Пример #30
0
def fml(phenny, input):
    """.fml - Grab something from fmylife.com."""
    try:
        req = web.get("http://www.fmylife.com/random")
    except Exception:  # was a bare except: — also swallowed SystemExit et al.
        raise GrumbleError("I tried to use .fml, but it was broken. FML")

    # First child of the first 'fmllink' element is the random FML entry.
    doc = lxml.html.fromstring(req)
    quote = doc.find_class('fmllink')[0][0].text_content()
    phenny.say(quote)