def makeresponse(self, name, res, channel, sep=" .. "):
    """ loop over result to make a response.

        name    .. name of the rss item feed
        res     .. iterable of feed entries (objects with title/link attributes)
        channel .. channel the response is built for
        sep     .. separator between entries (unused in the visible part;
                   NOTE(review): this chunk appears truncated — `result` is
                   built but never returned below)
    """
    item = self.byname(name)
    result = u"[%s] - " % name
    try:
        # which entry attributes (e.g. title, link) to show for this feed/channel
        itemslist = item.itemslists.data[jsonstring([name, channel])]
    except KeyError:
        # no per-channel list yet .. initialize with the default attributes
        item = self.byname(name)
        if item == None:
            return "no %s rss item" % name
        else:
            item.itemslists.data[jsonstring([name, channel])] = ['title', 'link']
            item.itemslists.save()
    for j in res:
        # optionally skip merge commits in the feed
        if item.markup.get(jsonstring([name, channel]), 'skipmerge') and 'Merge branch' in j['title']:
            continue
        resultstr = u""
        for i in item.itemslists.data[jsonstring([name, channel])]:
            try:
                ii = getattr(j, i)
                if not ii:
                    continue
                ii = unicode(ii)
                if ii.startswith('http://'):
                    # urls are optionally shortened via the tinyurl service
                    if item.markup.get(jsonstring([name, channel]), 'tinyurl'):
                        try:
                            tinyurl = get_tinyurl(ii)
                            logging.debug('rss - tinyurl is: %s' % str(tinyurl))
                            if not tinyurl:
                                resultstr += u"%s - " % ii
                            else:
                                resultstr += u"%s - " % tinyurl[0]
                        except Exception, ex:
                            handle_exception()
                            # NOTE(review): appends the feed object `item`, not the
                            # url `ii` — looks like a bug; confirm intent
                            resultstr += u"%s - " % item
                    else:
                        resultstr += u"%s - " % ii
                else:
                    resultstr += u"%s - " % ii.strip()
            except (KeyError, AttributeError), ex:
                # entry lacks this attribute .. log and try the next one
                logging.info('hubbub - %s - %s' % (name, str(ex)))
                continue
def getUrlInfo(text):
    """ get info of urls in given txt.

        Builds a one-line summary per url found in *text*: the page title
        (or image dimensions when the "showpictures" option is set), plus a
        shortened url when the output doesn't already contain one.
        Returns the stripped summary string ('' when nothing was found).
    """
    out = ''
    text = sanitize(text)
    urls = getUrls(text)
    if not urls:
        return out
    idx = 1
    for url in urls:
        o = ''
        try:
            # remote service that fetches the url and reports its metadata
            server = xmlrpclib.ServerProxy(
                "http://whatisthisfile.appspot.com/xmlrpc")
            logging.info('urlinfo - XMLRPC query: %s' % url)
            urlinfo = server.app.query(url)
            # dict membership test instead of the deprecated has_key()
            if 'html' in urlinfo:
                if 'title' in urlinfo['html']:
                    o += 'Title: "%s" ' % urlinfo['html']['title'].strip()
            elif 'image' in urlinfo and plugcfg.get("showpictures"):
                o += 'Image: %dx%d ' % (urlinfo['image']['width'],
                                        urlinfo['image']['height'])
            if not o:
                continue
            # o is known non-empty here, so append it unconditionally
            if len(urls) > 1:
                out += ' ' + str(idx) + '. '
                idx += 1
            out += o
            if "tinyurl" in url:
                # already a short url .. show it as-is
                out = out.strip()
                out += " - %s" % url
            elif not "http://" in out:
                out = out.strip()
                # get_tinyurl may return an empty result; fall back to the
                # original url instead of raising IndexError
                tiny = get_tinyurl(url)
                out += " - %s" % (tiny[0] if tiny else url)
        except Exception as ex:
            # best effort per url: log the failure and move to the next one
            # instead of silently swallowing it
            logging.warn('urlinfo - error querying %s: %s' % (url, str(ex)))
    return out.strip()
def handle_confluence_search(bot, ievent):
    """ search the confluence wiki configured for this channel.

        syntax: wiki [#<maxresults>] <search terms> .. with no arguments
        the wiki's base url is shown instead.
    """
    # only channels explicitly configured get wiki search
    if "channels" not in cfg.data or ievent.channel not in cfg.data["channels"]:
        ievent.reply("Confluence wiki search not enabled for this channel")
        return
    serverName = cfg.data["channels"][ievent.channel]
    server = cfg.data["servers"][serverName]
    if len(ievent.args) == 0:
        ievent.reply("The wiki is located at %s" % server["url"])
        return
    args = ievent.args
    if args[0][0] == "#":
        # a leading "#N" argument overrides the default result count
        try:
            maxResults = int(args[0].strip("#"))
        except ValueError:
            ievent.reply("invalid result count: %s" % args[0])
            return
        args = args[1:]
    else:
        maxResults = 5
    query = " ".join(args)
    try:
        client, auth = getRpcClient(server)
        results = client.search(auth, query, maxResults)
    except Exception as ex:
        # report rpc failures to the channel instead of crashing the handler
        ievent.reply("an error occured: %s" % str(ex))
        return
    ievent.reply("Displaying %s result(s) :" % min(maxResults, len(results)))
    for page in results[:maxResults]:
        # shorten each result url, falling back to the full url on failure
        tinyurl = get_tinyurl(page["url"])
        tinyurl = tinyurl[0] if tinyurl else page["url"]
        ievent.reply('"%s": %s' % (page["title"], tinyurl))
def handle_lmgt(bot, ievent):
    """ google something for them; syntax: lmgt [search terms] """
    if len(ievent.args) < 1:
        ievent.reply("syntax: lmgt [search terms]")
        return
    url = "http://lmgtfy.com/?q=%s" % quote(" ".join(ievent.args))
    # get_tinyurl may return an empty result; fall back to the full url
    # instead of raising IndexError on [0]
    tiny = get_tinyurl(url)
    ievent.reply("Let me google that for you: %s" % (tiny[0] if tiny else url))
def doLookup(bot, ievent):
    """ look up a git changeset hash from the message text in the channel's
        configured fisheye projects and announce the first match.
    """
    logging.info("Doing lookup for fisheye changeset")
    fnd = gitHashRule.match(ievent.txt)
    if not fnd:
        # no hash in the message .. nothing to look up (match() returns None,
        # and fnd.group(1) would raise AttributeError below)
        return
    for pname in cfg.data[ievent.channel]:
        project = cfg.data["projects"][pname]
        try:
            server, auth = getRpcClient(project)
            res = server.getChangeset(auth, pname, fnd.group(1))
            logging.info("response from fisheye: %s" % res)
            cs_url = "%s/changelog/%s?cs=%s" % (project["url"], pname, res["csid"])
            # shorten the changelog url, falling back to the full url
            tiny = get_tinyurl(cs_url)
            bot.say(
                ievent.channel,
                "%s- %s by %s: %s %s"
                % (pname, res["csid"][:7], res["author"],
                   res["log"].strip()[:60], tiny[0] if tiny else cs_url),
            )
            return
        except Exception as ex:
            # changeset not in this project (or rpc error) .. log and try the
            # next project instead of printing to stdout from a bare except
            logging.info("Couldn't find %s in %s: %s" % (fnd.group(1), pname, str(ex)))
def getUrlInfo(text):
    """ get info of urls in given txt.

        Builds a one-line summary per url found in *text*: the page title or
        image dimensions, plus a shortened url when the output doesn't
        already contain one. Returns the stripped summary ('' if none).
    """
    out = ''
    text = sanitize(text)
    urls = getUrls(text)
    if not urls:
        return out
    idx = 1
    for url in urls:
        o = ''
        try:
            # remote service that fetches the url and reports its metadata
            server = xmlrpclib.ServerProxy(
                "http://whatisthisfile.appspot.com/xmlrpc")
            logging.info('urlinfo - XMLRPC query: %s' % url)
            urlinfo = server.app.query(url)
            # dict membership test instead of the deprecated has_key()
            if 'html' in urlinfo:
                if 'title' in urlinfo['html']:
                    o += 'Title: "%s" ' % urlinfo['html']['title'].strip()
            elif 'image' in urlinfo:
                o += 'Image: %dx%d ' % (urlinfo['image']['width'],
                                        urlinfo['image']['height'])
            if not o:
                continue
            # o is known non-empty here, so append it unconditionally
            if len(urls) > 1:
                out += ' ' + str(idx) + '. '
                idx += 1
            out += o
            if "tinyurl" in url:
                # already a short url .. show it as-is
                out = out.strip()
                out += " - %s" % url
            elif not "http://" in out:
                out = out.strip()
                # get_tinyurl may return an empty result; fall back to the
                # original url instead of raising IndexError
                tiny = get_tinyurl(url)
                out += " - %s" % (tiny[0] if tiny else url)
        except Exception as ex:
            # best effort per url: log the failure and continue instead of
            # silently swallowing it
            logging.warn('urlinfo - error querying %s: %s' % (url, str(ex)))
    return out.strip()
def getUrlInfo(text):
    """ get info of urls in given txt.

        Builds a one-line summary per url found in *text*: the page title
        (or image dimensions when "showpictures" is enabled), plus a
        shortened url when the output doesn't already contain one.
        Returns the stripped summary string ('' when nothing was found).
    """
    out = ''
    text = sanitize(text)
    urls = getUrls(text)
    if not urls:
        return out
    idx = 1
    for url in urls:
        o = ''
        try:
            # remote service that fetches the url and reports its metadata
            server = xmlrpclib.ServerProxy("http://whatisthisfile.appspot.com/xmlrpc")
            logging.info('urlinfo - XMLRPC query: %s' % url)
            urlinfo = server.app.query(url)
            # dict membership test instead of the deprecated has_key()
            if 'html' in urlinfo:
                if 'title' in urlinfo['html']:
                    o += 'Title: "%s" ' % urlinfo['html']['title'].strip()
            elif 'image' in urlinfo and plugcfg.get("showpictures"):
                o += 'Image: %dx%d ' % (urlinfo['image']['width'],
                                        urlinfo['image']['height'])
            if not o:
                continue
            # o is known non-empty here; one statement per line instead of
            # semicolon-joined statements
            if len(urls) > 1:
                out += ' ' + str(idx) + '. '
                idx += 1
            out += o
            if "tinyurl" in url:
                # already a short url .. show it as-is
                out = out.strip()
                out += " - %s" % url
            elif not "http://" in out:
                out = out.strip()
                # get_tinyurl may return an empty result; fall back to the
                # original url instead of raising IndexError
                tiny = get_tinyurl(url)
                out += " - %s" % (tiny[0] if tiny else url)
        except Exception as ex:
            # best effort per url: log the failure and continue instead of
            # silently swallowing it
            logging.warn('urlinfo - error querying %s: %s' % (url, str(ex)))
    return out.strip()
def doLookup(bot, ievent):
    """ look up a git changeset hash from the message text in the channel's
        configured fisheye projects and announce the first match.
    """
    logging.info("Doing lookup for fisheye changeset")
    fnd = gitHashRule.match(ievent.txt)
    if not fnd:
        # no hash in the message .. nothing to look up (match() returns None,
        # and fnd.group(1) would raise AttributeError below)
        return
    for pname in cfg.data[ievent.channel]:
        project = cfg.data["projects"][pname]
        try:
            server, auth = getRpcClient(project)
            res = server.getChangeset(auth, pname, fnd.group(1))
            logging.info('response from fisheye: %s' % res)
            cs_url = "%s/changelog/%s?cs=%s" % (project["url"], pname, res["csid"])
            # shorten the changelog url, falling back to the full url
            tiny = get_tinyurl(cs_url)
            bot.say(ievent.channel,
                    "%s- %s by %s: %s %s" % (pname, res["csid"][:7],
                                             res["author"],
                                             res["log"].strip()[:60],
                                             tiny[0] if tiny else cs_url))
            return
        except Exception as ex:
            # changeset not in this project (or rpc error) .. log and try the
            # next project instead of printing to stdout from a bare except
            logging.info("Couldn't find %s in %s: %s" % (fnd.group(1), pname, str(ex)))
# NOTE(review): fragment — the enclosing `def` header (presumably
# `handle_confluence_search(bot, ievent)`, given the trailing cmnds.add
# registration) lies outside this view; body indentation restored.
    if len(ievent.args) == 0:
        ievent.reply("The wiki is located at %s" % server["url"])
        return
    args = ievent.args
    if args[0][0] == "#":
        # a leading "#N" argument overrides the default result count
        maxResults = int(args[0].strip("#"))
        args = args[1:]
    else:
        maxResults = 5
    query = " ".join(args)
    try:
        client, auth = getRpcClient(server)
        results = client.search(auth, query, maxResults)
    except Exception, ex:
        # report rpc failures to the channel instead of crashing the handler
        ievent.reply("an error occured: %s" % str(ex))
        return
    ievent.reply("Displaying %s result(s) :" % min(maxResults, len(results)))
    for page in results[:maxResults]:
        # shorten each result url, falling back to the full url on failure
        tinyurl = get_tinyurl(page["url"])
        tinyurl = tinyurl[0] if tinyurl else page["url"]
        ievent.reply('"%s": %s' % (page["title"], tinyurl))

# register the command and a usage example with the bot framework
cmnds.add("wiki", handle_confluence_search, ["OPER", "USER", "GUEST"])
examples.add("wiki", "perform a lookup in the selected confluence instance", "wiki #5 some search text")
def doLookup(bot, ievent):
    """ look up a git commit hash from the message text in the channel's
        configured github projects and announce the first match.
    """
    fnd = gitHashRule.match(ievent.txt)
    if not fnd:
        # no hash in the message .. nothing to look up (match() returns None,
        # and fnd.group(1) would raise AttributeError below)
        return
    for project in cfg.data[ievent.channel]:
        try:
            res = gh.commits.show(project, sha=fnd.group(1))
            logging.info('response from github: %s' % res)
            commit_url = "https://github.com" + res.url
            # shorten the commit url, falling back to the full url
            tiny = get_tinyurl(commit_url)
            bot.say(ievent.channel,
                    "%s- %s by %s: %s %s" % (project, res.id[:7],
                                             res.author["name"],
                                             res.message[:60],
                                             tiny[0] if tiny else commit_url))
            return
        except Exception as ex:
            # commit not in this project (or api error) .. log and try the
            # next project instead of printing to stdout from a bare except
            logging.info("Couldn't find %s in %s: %s" % (fnd.group(1), project, str(ex)))