def command(self, user, channel, command, options):
    """Handle !googlefight: compare Google hit counts of two search terms.

    Expects options of the form "word1:word2"; posts the winner to the channel.
    """
    if command == "googlefight":
        words = options.split(":")
        if len(words) == 2:
            # TODO: blocking downloads — should use deferreds like the other plugins
            data1 = urlutils.download(self.baseURL % words[0].replace(" ", "+"))
            data2 = urlutils.download(self.baseURL % words[1].replace(" ", "+"))
            count1 = "0"
            count2 = "0"
            match = re.match(self.countRE, data1, re.S)
            if match:
                count1 = match.group(1)
            # BUG FIX: was "elf.countRE" (NameError) — must be self.countRE
            match = re.match(self.countRE, data2, re.S)
            if match:
                count2 = match.group(1)
            ansmsg = "Google Fight!: %s siegt ueber %s (%s zu %s Treffer)"
            # hit counts use "." as thousands separator; strip before comparing
            if int(re.sub(r"\.", "", count1)) > int(re.sub(r"\.", "", count2)):
                self.bot.sendmsg(channel, ansmsg % (words[0], words[1], str(count1), count2))
            else:
                self.bot.sendmsg(channel, ansmsg % (words[1], words[0], str(count2), count1))
        else:
            self.bot.sendmsg(channel, "!googlefight wort1:wort2")
def command(self, user, channel, command, options):
    """Handle !kurs / !wkn: asynchronously look up stock quote or WKN info.

    Applies a simple flood guard: repeated commands within 5 seconds are
    answered with a wait message instead of triggering another download.
    """
    # flood protection: drop repeated commands issued within 5 seconds
    if command in self.commands and 0 < (time.time() - self.time) < 5:
        self.bot.sendmsg(channel, "Wait a minute ...")
        return
    self.time = time.time()
    # the two commands are mutually exclusive, so elif is sufficient
    if command == "kurs":
        d = urlutils.download(self.ku % urllib.parse.quote_plus(options))
        d.addCallback(self.parseKurs, channel)
        d.addErrback(self.error, channel)
    elif command == "wkn":
        d = urlutils.download(self.wknu % urllib.parse.quote_plus(options))
        d.addCallback(self.parseWKN, channel)
        d.addErrback(self.error, channel)
def command(self, user, channel, command, options):
    """Handle !kurs / !wkn: asynchronously look up stock quote or WKN info.

    Applies a simple flood guard: repeated commands within 5 seconds are
    answered with a wait message instead of triggering another download.
    """
    # flood protection: drop repeated commands issued within 5 seconds
    if command in self.commands and 0 < (time.time() - self.time) < 5:
        self.bot.sendmsg(channel, "Wait a minute ...")
        return
    self.time = time.time()
    # the two commands are mutually exclusive, so elif is sufficient
    if command == "kurs":
        d = urlutils.download(self.ku % urllib.quote_plus(options))
        d.addCallback(self.parseKurs, channel)
        d.addErrback(self.error, channel)
    elif command == "wkn":
        d = urlutils.download(self.wknu % urllib.quote_plus(options))
        d.addCallback(self.parseWKN, channel)
        d.addErrback(self.error, channel)
def command(self, user, channel, command, options):
    """Handle "preview" and "tinyurl" commands for URLs posted to the channel."""
    self.parser = titleExtractor()
    if "preview" in command:
        # ask for HTML explicitly so the server returns a page with a title
        d = urlutils.download(options, headers={'Accept': 'text/html'})
        d.addCallback(self.processPreview, channel)
        d.addErrback(self.error, channel)
    if "tinyurl" in command:
        # without an argument, shorten the last URL seen in the channel
        url = options if options != "" else self.lasturl
        d = urlutils.download("http://tinyurl.com/api-create.php?url=" + url)
        d.addCallback(self.processTiny, channel)
        d.addErrback(self.error, channel)
def command(self, user, channel, command, options, auto=False):
    """Handle "preview" and "tinyurl" commands for URLs.

    auto=True means the command was invoked via autoTiny/autoPreview; then
    the plugin must NOT post anything to the channel if an error occurred,
    so errors are only logged.
    """
    # choose the errback once: silent logging for automatic invocations
    errback = self.log_error if auto else self.error
    if "preview" in command:
        if options == "":
            if self.lasturl:
                options = self.lasturl
            else:
                # nothing to preview
                return
        d = urlutils.get_headers(options)
        d.addCallback(self.checkForHTML, options, channel, auto)
        d.addErrback(errback, channel)
    if "tinyurl" in command:
        if options == "":
            options = self.lasturl
        d = urlutils.download("http://tinyurl.com/api-create.php?url=" + options)
        d.addCallback(self.processTiny, channel)
        d.addErrback(errback, channel)
def command(self, user, channel, command, options):
    """Answer icecast status commands: np, listeners, mountpoints/mounts."""
    statusurl = self.config.get("statusurl", "http://localhost:8000/status2.xsl",
                                "icecast", self.bot.network, channel)
    # fall back to the configured default mountpoint when none was given
    if options == "":
        mountpoint = self.config.get("defaultMountpoint", "/radio.ogg",
                                     "icecast", self.bot.network, channel)
    else:
        mountpoint = options
    if command in ("np", "listeners"):
        d = urlutils.download(statusurl)
        d.addCallback(self.downloadFinished, command, channel, mountpoint)
    elif command in ("mountpoints", "mounts"):
        d = urlutils.download(statusurl)
        d.addCallback(self.downloadFinished, "mounts", channel, "")
def command(self, user, channel, command, options):
    """Handle !googlefight using two parallel deferred downloads.

    State (words, channel, callback flags) is stored on the instance so the
    two result callbacks can coordinate the final answer.
    """
    _ = self.bot.get_gettext(channel)
    if command == "googlefight":
        self.words = options.split(":")
        self.channel = channel
        # both downloads must report back before an answer is posted
        self.gotcallback1 = False
        self.gotcallback2 = False
        if len(self.words) == 2:
            data1 = urlutils.download(self.baseUrl % self.words[0].replace(" ", "+"))
            data2 = urlutils.download(self.baseUrl % self.words[1].replace(" ", "+"))
            data1.addCallback(self.callback1)
            data2.addCallback(self.callback2)
        else:
            self.bot.sendmsg(channel, _("!googlefight word1:word2"))
def fetch_weather(codes):
    """Get the weather from Yahoo! Weather for the first entry in *codes*.

    Returns a deferred firing with the parsed forecast, or an already-fired
    deferred with an empty list when no codes were supplied.
    """
    if len(codes) < 1:
        return defer.succeed([])
    url = ("http://xml.weather.yahoo.com/forecastrss/%s_c.xml"
           % str(codes[0]['code']))
    d = urlutils.download(url)
    return d.addCallback(parse_weather)
def download_data(self):
    """Fetch TV/EPG data from the configured source and schedule processing."""
    if self.source == "otr":
        # purge cached files whose embedded date (epg_YYYY_MM_DD.csv) has passed
        for entry in os.listdir(self.tvdatadir):
            try:
                parts = entry.split("_")
                date = parts[1] + parts[2] + parts[3].split(".")[0]
                if int(date) < int(time.strftime("%Y%m%d", time.localtime())):
                    os.remove(self.tvdatadir + "/" + entry)
            except IndexError:
                pass
        # fetch data for the coming days; refresh a file at most twice a day
        for offset in range(self.days):
            filename = time.strftime("epg_%Y_%m_%d.csv",
                                     time.gmtime(time.time() + 86400 * offset))
            target = self.tvdatadir + "/" + filename
            try:
                # only re-download when the cached file is older than half a day
                if os.stat(target).st_mtime + 43200 < time.time():
                    urlutils.download(self.dataurl + filename, target)
            except OSError:  # file not present yet
                urlutils.download(self.dataurl + filename, target)
    elif self.source == "xmltv":
        urlutils.download(self.dataurl, self.xmltvfile)
    self.bot.root.getServiceNamed('scheduler').callLater(
        10, self.processUpdatedData)
def checkForHTML(self, header, url, channel, auto):
    """If *url* serves HTML, download it for a title preview; otherwise post
    mime type and size info to the channel.

    auto=True routes errors to the log instead of the channel.
    """
    if urlutils.is_html(header):
        d = urlutils.download(url, headers={'Accept': 'text/html'})
        d.addCallback(self.processPreview, channel)
        if not auto:
            d.addErrback(self.error, channel)
        else:
            d.addErrback(self.log_error, channel)
    else:
        parts = []
        if "content-type" in header:
            parts.append("Mime-Type: %s" % header["content-type"])
        if "content-length" in header:
            parts.append("%s" % urlutils.convert_bytes(header["content-length"]))
        # join avoids a stray leading ", " when only content-length is present
        self.bot.sendmsg(channel, "[Link Info] " + ", ".join(parts))
def checkForHTML(self, header, url, channel, auto):
    """If *url* serves HTML, download it for a title preview; otherwise post
    mime type and size info to the channel.

    auto=True routes errors to the log instead of the channel.
    """
    if urlutils.is_html(header):
        d = urlutils.download(url, headers={'Accept': 'text/html'})
        d.addCallback(self.processPreview, channel)
        if not auto:
            d.addErrback(self.error, channel)
        else:
            d.addErrback(self.log_error, channel)
    else:
        parts = []
        if "content-type" in header:
            parts.append(u"Mime-Type: %s" % header["content-type"])
        if "content-length" in header:
            parts.append(u"%s" % urlutils.convert_bytes(header["content-length"]))
        # join avoids a stray leading ", " when only content-length is present
        self.bot.sendmsg(channel, "[Link Info] " + u", ".join(parts))
def download_data(self):
    """Fetch TV/EPG data from the configured source and schedule processing."""
    if self.source == "otr":
        # remove cached files whose embedded date (epg_YYYY_MM_DD.csv) is past
        for name in os.listdir(self.tvdatadir):
            try:
                fields = name.split("_")
                filedate = fields[1] + fields[2] + fields[3].split(".")[0]
                if int(filedate) < int(time.strftime("%Y%m%d", time.localtime())):
                    os.remove(self.tvdatadir + "/" + name)
            except IndexError:
                pass
        # download data for the next self.days days
        for day in range(self.days):
            filename = time.strftime("epg_%Y_%m_%d.csv",
                                     time.gmtime(time.time() + 86400 * day))
            path = self.tvdatadir + "/" + filename
            try:
                # skip files younger than half a day (43200 s)
                if os.stat(path).st_mtime + 43200 < time.time():
                    urlutils.download(self.dataurl + filename, path)
            except OSError:  # file missing -> fetch it
                urlutils.download(self.dataurl + filename, path)
    elif self.source == "xmltv":
        urlutils.download(self.dataurl, self.xmltvfile)
    self.bot.root.getServiceNamed('scheduler').callLater(10, self.processUpdatedData)
def update_data(self, dataurl):
    """Download fresh XMLTV data and schedule processing in 10 seconds."""
    urlutils.download(dataurl, self.xmltvfile)
    scheduler = self.bot.root.getServiceNamed('scheduler')
    scheduler.callLater(10, self.processUpdatedData, dataurl)
def get_location_code(location):
    """Fetch the weather.com location code for *location*.

    Returns a deferred firing with the parsed location code.
    """
    encoded = urllib.quote_plus(location)
    d = urlutils.download(
        "http://xoap.weather.com/search/search?where=%s" % encoded)
    return d.addCallback(parse_location_code)
def loadNews(self, url):
    """Fetch the feed at *url* and hand the result to parseNews."""
    # remember when this feed was last loaded
    self.feedLastLoaded[url] = int(time.time())  # to be removed, too?
    self.logger.debug("loading new Headlines")
    d = urlutils.download(url)
    d.addCallback(self.parseNews, url)
def command(self, user, channel, command, options):
    """Run a YouTube search for *options* and post the results."""
    if command == "youtube" and options:
        query = urllib.parse.quote(options.encode("UTF-8"))
        url = ("http://gdata.youtube.com/feeds/base/videos"
               "?q=%s&client=ytapi-youtube-search&alt=rss&v=2" % query)
        d = urlutils.download(url)
        d.addCallback(self.downloadFinished, channel)
def command(self, user, channel, command, options):
    """Run a YouTube search for *options* and post the results."""
    if command == "youtube" and options:
        query = urllib.quote(options.encode("UTF-8"))
        url = ("http://gdata.youtube.com/feeds/base/videos"
               "?q=%s&client=ytapi-youtube-search&alt=rss&v=2" % query)
        d = urlutils.download(url)
        d.addCallback(self.downloadFinished, channel)