def say(self, channel, txt, result=[], how="msg", event=None, nr=375, extend=0, dot=", ", showall=False, *args, **kwargs):
    """ default method to send txt from the bot to a user/channel/jid/conference etc. """
    logging.warning("saying to %s" % channel)
    if event:
        # honour the ignore list: drop output for ignored userhosts
        if event.userhost in self.ignore:
            logging.warning("%s - ignore on %s - no output done" % (self.cfg.name, event.userhost))
            return
        # on IRC, private messages go back to the nick instead of the channel
        if event.how == "msg" and self.type == "irc":
            target = event.nick
        else:
            target = channel
        # pipelined events collect their output on the event queue instead of sending it
        if event.pipelined:
            dres = []
            if issubclass(type(result), dict):
                for key, value in result.items():
                    dres.append("%s: %s" % (key, value))
            for i in dres or result:
                event.outqueue.append(i)
            return
    else:
        target = channel
    if showall or (event and event.showall):
        txt = self.makeresponse(txt, result, dot, *args, **kwargs)
    else:
        txt = self.makeoutput(channel, txt, result, nr, extend, dot, origin=target, *args, **kwargs)
    if txt:
        txt = decode_html_entities(txt)
        if event:
            event.nrout += 1
            if event.displayname:
                txt = "[%s] %s" % (event.displayname, txt)
            if result:
                for i in result:
                    event.outqueue.append(i)
            event.resqueue.append(txt)
            if event.nooutput:
                event.ready()
                return
        else:
            logging.info("not putting txt on queues")
        txt = self.outputmorphs.do(txt, event)
        self.out(target, txt, how, event=event, origin=target, *args, **kwargs)
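# Call sketch (illustrative values only; a live bot instance and, optionally, an
# incoming event are required, so this is left as a comment rather than run here):
#
#     bot.say("#channel", "all done", result=["first line", "second line"], how="msg")
#     bot.say(event.channel, "private answer", event=event, how="msg")
#
# When an event is passed, IRC "msg" output is redirected to event.nick, and the text
# is appended to event.outqueue/event.resqueue instead of being sent directly when the
# event is pipelined or marked nooutput.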
def geturl_title(url):
    """ fetch title of url """
    try:
        result = geturl2(url)
    except urllib.error.HTTPError as ex:
        logging.warning("HTTPError: %s" % str(ex))
        return False
    except urllib.error.URLError as ex:
        logging.warning("URLError: %s" % str(ex))
        return False
    except IOError as ex:
        try:
            # IOError carries the error number as its first argument
            errno = ex.args[0]
        except IndexError:
            handle_exception()
        return False
    if not result:
        return False
    test_title = re_html_title.search(result)
    if test_title:
        # try to find an encoding and standardize it to utf-8
        encoding = get_encoding(result)
        title = test_title.group(1).replace('\n', ' ')
        title = title.strip()
        return decode_html_entities(title)
    return False
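# Usage sketch for geturl_title(); it assumes the module-level helpers used above
# (geturl2, re_html_title, get_encoding, decode_html_entities) are importable, and
# the URL is only an example.
if __name__ == "__main__":
    title = geturl_title("http://www.example.com/")
    if title:
        print("title: %s" % title)
    else:
        print("could not fetch a title")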
def handle_imdb(bot, event):
    """ arguments: <query> - query the imdb database at http://www.deanclatworthy.com/imdb/ """
    if not event.rest:
        event.missing("<query>")
        return
    query = event.rest.strip()
    urlquery = query.replace(" ", "+")
    result = {}
    res = geturl2(URL % urlquery)
    if not res:
        event.reply("%s didn't return a result" % (URL % urlquery))
        return
    try:
        rawresult = getjson().loads(res)
    except ValueError:
        event.reply("sorry, cannot parse the data returned from the server: %s" % res)
        return
    # the API is limited to 30 queries per hour, so avoid querying it just for testing purposes
    # rawresult = {u'ukscreens': 0, u'rating': u'7.7', u'genres': u'Animation, Drama,Family,Fantasy,Music', u'title': u'Pinocchio', u'series': 0, u'country': u'USA', u'votes': u'23209', u'languages': u'English', u'stv': 0, u'year': None, u'usascreens': 0, u'imdburl': u'http://www.imdb.com/title/tt0032910/'}
    if not rawresult:
        event.reply("couldn't look up %s" % query)
        return
    if 'error' in rawresult:
        event.reply("%s" % rawresult['error'])
        return
    # substitute "n/a" for empty fields before formatting the reply
    for key in list(rawresult.keys()):
        if not rawresult[key]:
            result[key] = "n/a"
        else:
            result[key] = rawresult[key]
    # strip markup and entities from the values (use result, not rawresult, so the
    # "n/a" substitution above is preserved)
    for key in list(result.keys()):
        try:
            result[key] = striphtml(decode_html_entities(str(result[key])))
        except AttributeError:
            pass
    if "year" in list(rawresult.keys()):
        event.reply("%(title)s (%(country)s, %(year)s): %(imdburl)s | rating: %(rating)s (out of %(votes)s votes) | Genres: %(genres)s | Language: %(languages)s" % result)
    else:
        event.reply("%(title)s (%(country)s): %(imdburl)s | rating: %(rating)s (out of %(votes)s votes) | Genres: %(genres)s | Language: %(languages)s" % result)
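# Smoke-test sketch for handle_imdb(). _StubEvent and _demo_imdb are hypothetical
# helpers, not part of the plugin: the stub only provides the attributes the handler
# actually touches (rest, missing, reply), and the bot argument is unused so None is
# passed. Calling _demo_imdb() performs a real HTTP request via geturl2(), and the
# backing API is rate limited, so run it sparingly.
class _StubEvent(object):
    rest = "pinocchio"
    def missing(self, txt):
        print("missing: %s" % txt)
    def reply(self, txt):
        print("reply: %s" % txt)

def _demo_imdb():
    handle_imdb(None, _StubEvent())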