def handle_translate(bot, event):
    """ arguments: <from> <to> <text> - translate text between two languages using the Google translate API. """
    if not event.rest:
        event.missing("<from> <to> <text>")
        return
    query = parse_pair(event.rest.strip())
    if not query:
        event.missing("<from> <to> <text>")
        return
    try:
        rawresult = getjson().loads(geturl2(URL % query))
    except Exception:
        # was a bare except: - narrowed so KeyboardInterrupt/SystemExit still propagate
        event.reply("Query to Google failed")
        return
    # the API reports errors through responseStatus in the payload, not via HTTP status
    if rawresult.get('responseStatus') != 200:
        # format the raw payload into the message instead of passing it as a stray positional arg
        event.reply("Error in the query: %s" % rawresult)
        return
    if 'responseData' in rawresult:
        if 'translatedText' in rawresult['responseData']:
            event.reply(rawresult['responseData']['translatedText'])
        else:
            event.reply("No text available")
    else:
        event.reply("Something is wrong, probably the API changed")
def handle_imdb(bot, event):
    """ arguments: <query> - query the imdb database at http://www.deanclatworthy.com/imdb/ """
    if not event.rest:
        event.missing("<query>")
        return
    query = event.rest.strip()
    urlquery = query.replace(" ", "+")
    result = {}
    rawresult = getjson().loads(geturl2(URL % urlquery))
    # NOTE: the API is limited to 30 queries per hour - don't hit it just for testing
    if not rawresult:
        event.reply("couldn't look up %s" % query)
        return
    if 'error' in rawresult:
        event.reply("%s" % rawresult['error'])
        return
    # removed leftover debug statement: print rawresult
    # substitute a placeholder for every empty/None field so the reply format never KeyErrors
    for key in rawresult.keys():
        if not rawresult[key]:
            result[key] = u"n/a"
        else:
            result[key] = rawresult[key]
    for key in result.keys():
        try:
            result[key] = striphtml(decode_html_entities(rawresult[key]))
        except AttributeError:
            # non-string values (ints, None) can't be entity-decoded; keep the value set above
            pass
    if "year" in rawresult:
        event.reply(
            "%(title)s (%(country)s, %(year)s): %(imdburl)s | rating: %(rating)s (out of %(votes)s votes) | Genres %(genres)s | Language: %(languages)s" % result)
    else:
        event.reply(
            "%(title)s (%(country)s): %(imdburl)s | rating: %(rating)s (out of %(votes)s votes) | Genres %(genres)s | Language: %(languages)s" % result)
def parse(self, response, request):
    """ parse request/response into a WebEvent. """
    logging.warn("parsing %s" % request.body)
    body = getpostdata_gae(request)
    logging.warn("body is %s" % body)
    data = LazyDict(getjson().loads(body))
    self.target = data.target
    # default delivery mode is the channel
    self.how = data.how or "channel"
    cmnd = data.cmnd
    self.isweb = True
    self.origtxt = fromenc(cmnd.strip(), self.bot.encoding)
    self.txt = self.origtxt
    # first token of the text is the command name (falsy text leaves it falsy)
    self.usercmnd = self.txt and self.txt.split()[0]
    self.groupchat = False
    self.response = response
    self.request = request
    (userhost, user, u, nick) = checkuser(response, request, self)
    self.userhost = fromenc(userhost)
    self.nick = fromenc(nick)
    self.auth = fromenc(userhost)
    self.stripped = stripped(self.auth)
    self.domain = None
    self.channel = stripped(userhost)
    logging.debug(u"web - parsed - %s - %s" % (self.txt, self.userhost))
    self.makeargs()
    return self
def save(self, attributes=None):
    """ persist this container as json; save only *attributes* when given, otherwise a full copy. """
    # default changed from a mutable [] to None (equivalent: both are falsy)
    target = {}
    if attributes:
        for key in attributes:
            target[key] = self[key]
    else:
        target = cpy(self)
    # file is <createtime>_<origin> inside the datadir "containers" directory
    targetfile = getdatadir() + os.sep + "containers" + os.sep + str(self.createtime) + "_" + stripname(self.origin)
    p = Persist(targetfile)
    p.data = getjson().dumps(target)
    p.save()
def show_cfg(self, bot, ievent):
    """ show config options. """
    s = []
    dumpstr = self.tojson()
    logging.warn(dumpstr)
    for key, optionvalue in sorted(getjson().loads(dumpstr).iteritems()):
        # skip options marked as hidden
        if key in self.hide:
            continue
        v = optionvalue
        # quote strings so the output distinguishes "1" from 1
        # isinstance(basestring) replaces type(v) in [str, unicode]
        if isinstance(v, basestring):
            v = '"' + v + '"'
        # NOTE(review): str() on unicode with non-ascii chars would raise UnicodeEncodeError - confirm values are ascii-safe
        v = str(v)
        s.append("%s=%s" % (key, v))
    ievent.reply("options: " + ' .. '.join(s))
def save(self, attributes=None):
    """ write this container to disk as json; restrict to *attributes* when a list is given. """
    # mutable default argument [] replaced with None (both falsy, so behavior is unchanged)
    target = {}
    if attributes:
        for key in attributes:
            target[key] = self[key]
    else:
        target = cpy(self)
    # target path: <datadir>/containers/<createtime>_<origin>
    targetfile = getdatadir() + os.sep + "containers" + os.sep + str(
        self.createtime) + "_" + stripname(self.origin)
    p = Persist(targetfile)
    p.data = getjson().dumps(target)
    p.save()
def show_cfg(self, bot, ievent):
    """ show config options. """
    s = []
    dumpstr = self.tojson()
    logging.warn(dumpstr)
    for key, optionvalue in sorted(getjson().loads(dumpstr).iteritems()):
        # options listed in self.hide are not shown
        if key in self.hide:
            continue
        v = optionvalue
        # quote string values; isinstance(basestring) replaces type(v) in [str, unicode]
        if isinstance(v, basestring):
            v = '"' + v + '"'
        # NOTE(review): str() raises UnicodeEncodeError on non-ascii unicode - confirm config values are ascii-safe
        v = str(v)
        s.append("%s=%s" % (key, v))
    ievent.reply("options: " + ' .. '.join(s))
def outnocb(self, channel, txt, how="cache", event=None, origin=None, response=None, dotime=False, *args, **kwargs): txt = self.normalize(txt) if event and event.how != "background": logging.warn("%s - out - %s" % (self.cfg.name, txt)) if "http://" in txt or "https://" in txt: for item in re_url_match.findall(txt): logging.debug("web - raw - found url - %s" % item) url = u'<a href="%s" onclick="window.open(\'%s\'); return false;">%s</a>' % (item, item, item) try: txt = re.sub(item, url, txt) except ValueError: logging.error("web - invalid url - %s" % url) if event: outdict = {"target": event.target or "content_div", "result": txt + "<br>", "how": event.how or "normal"} try: txt = getjson().dumps(outdict) except Exception, ex: handle_exception() ; return if how == "channel": self.update_web(channel, txt) elif how == "direct": self._raw(txt, response) else: self.update_web(channel, txt) else: self.update_web(channel, txt)
def handle_imdb(bot, event):
    """ arguments: <query> - query the imdb databae at http://www.deanclatworthy.com/imdb/ """
    if not event.rest:
        event.missing("<query>")
        return
    query = event.rest.strip()
    urlquery = query.replace(" ", "+")
    rawresult = getjson().loads(geturl2(URL % urlquery))
    # the API is limited to 30 queries per hour, so avoid querying it just for testing purposes
    if not rawresult:
        event.reply("couldn't look up %s" % query)
        return
    if 'error' in rawresult:
        event.reply("%s" % rawresult['error'])
        return
    # fill empty fields with a placeholder so the format string below never misses a key
    result = {}
    for key in rawresult.keys():
        result[key] = rawresult[key] if rawresult[key] else u"n/a"
    for key in result.keys():
        try:
            result[key] = striphtml(decode_html_entities(rawresult[key]))
        except AttributeError:
            # non-string fields can't be entity-decoded; keep the placeholder
            pass
    event.reply("%(title)s (%(country)s, %(year)s): %(imdburl)s | rating: %(rating)s (out of %(votes)s votes) | Genres %(genres)s | Language: %(languages)s" % result )
from jsb.lib.botbase import BotBase
from jsb.lib.errors import NotConnected
from jsb.drivers.convore.event import ConvoreEvent
from jsb.utils.lazydict import LazyDict
from jsb.utils.exception import handle_exception
from jsb.imports import getjson, getrequests

## basic imports

import logging
import time
import urllib2

## defines

json = getjson()
requests = getrequests()

## ConvoreBot

class ConvoreBot(BotBase):

    """ The Convore Bot. """

    def __init__(self, cfg=None, usersin=None, plugs=None, botname=None, nick=None, *args, **kwargs):
        """ initialise the convore bot: set bot type, seed the name/id caches in state and pick the nick from the config. """
        BotBase.__init__(self, cfg, usersin, plugs, botname, nick, *args, **kwargs)
        self.type = "convore"
        # cursor for the convore live feed - presumably set when polling starts; TODO confirm
        self.cursor = None
        # make sure both caches exist in the bot state before first use
        if not self.state.has_key("namecache"): self.state["namecache"] = {}
        if not self.state.has_key("idcache"): self.state["idcache"] = {}
        # fall back to "jsonbot" when no username is configured
        self.cfg.nick = cfg.username or "jsonbot"
from config import Config, getmainconfig from datadir import getdatadir from users import users from plugins import plugs from persist import Persist from errors import NoSuchBotType, BotNotEnabled from threads import start_new_thread from eventhandler import mainhandler from jsb.utils.name import stripname from jsb.lib.factory import BotFactory from jsb.utils.lazydict import LazyDict ## simplejson imports from jsb.imports import getjson json = getjson() ## basic imports import Queue import os import types import time import glob import logging import threading import thread import copy ## defines
def querygeoipserver(ip):
    """ fetch geoip info for *ip* and return the decoded json payload. """
    return getjson().loads(geturl2(URL % ip))
def do_imdb_api_query(query):
    """ run *query* against the deanclatworthy imdb API and return the parsed json result. """
    url = "http://www.deanclatworthy.com/imdb/?" + query
    logging.warn(url)
    return getjson().loads(geturl2(url))