def doscan(self, scanlist):
    """ Poll each shoutcast node in *scanlist* and announce new songs.

        scanlist is an iterable of (botname, channel, name, node) tuples;
        node is a shoutcast host whose /7.html page exposes the current
        song as the 7th comma-separated field.
    """
    for botname, channel, name, node in scanlist:
        try:
            result = geturl('http://%s/7.html' % node)
        except Exception as ex:  # was "except Exception, ex" (py3-incompatible)
            rlog(10, 'shoutcast', "can't get %s shoutcast data: %s" % \
                 (node, str(ex)))
            continue
        try:
            res = result.split(',')[6]
        except IndexError:
            rlog(10, 'shoutcast', "can't match %s shoutcast data" % node)
            continue
        song = striphtml(res).strip().replace('\n', '')
        bot = fleet.byname(botname)
        if not (bot and channel in bot.state['joinedchannels']):
            continue
        # announce only songs we haven't already seen
        got = any(played == song for ttime, played in self.songsplayed)
        if not got:
            self.songsplayed.append((time.time(), song))
            bot.say(channel, "now playing on %s: %s" % (name, song))
        else:
            # prune entries older than 30 minutes; rebuild the list instead
            # of calling remove() while iterating it (the original mutated
            # the list during iteration, which skips elements)
            now = time.time()
            self.songsplayed = [(ttime, played)
                                for ttime, played in self.songsplayed
                                if now - ttime <= 1800]
def fetch(self):
    """ Scrape show metadata from the series page at base_url + name.

        Returns a sorted list of (key, value) pairs; keys seen so far:
        'last ep', 'next ep', 'genre', 'status', 'runtime'.  Each key is
        filled from the first matching line only.
    """
    meta = {}
    data = geturl(self.base_url + self.name)
    for line in data.splitlines():
        # the positional split()/index chains below depend on the exact
        # page markup; IndexError just means the line didn't match
        try:
            if 'Latest Episode:' in line and 'last ep' not in meta:  # has_key removed in py3
                meta['last ep'] = line.split('>')[14].split('</a')[0].split("'")[1].split('/')[-1]
                test_date = self.re_date.search(line)
                if test_date:
                    meta['last ep'] = '%s (aired %s)' % (meta['last ep'], test_date.group(1))
            elif 'Next Episode:' in line and 'next ep' not in meta:
                meta['next ep'] = line.split('>')[7].split('</a')[0].split("'")[1].split('/')[-1]
                test_date = self.re_date.search(line)
                if test_date:
                    meta['next ep'] = '%s (airs %s)' % (meta['next ep'], test_date.group(1))
            elif 'Genre:' in line and 'genre' not in meta:
                meta['genre'] = line.split('>')[6].split('</td')[0]
            elif 'Status:' in line and 'status' not in meta:
                meta['status'] = line.split('>')[6].split('</td')[0]
            elif 'Runtime:' in line and 'runtime' not in meta:
                meta['runtime'] = line.split('>')[6].split('</td')[0]
        except IndexError:
            pass
    # sorted() replaces the old "items(); sort()" dance and also works on py3
    return sorted(meta.items())
def show(self, bugId):
    """ Fetch bug *bugId* via the tracker's XML interface.

        Returns a dict mapping friendly field names (created, product,
        status, ...) to their values; fields absent from the XML are
        simply omitted.  Raises BugTrackerNotFound when the response
        contains no <bug> element.
    """
    assert bugId.isdigit(), "bug id has to be a number"
    bugxml = geturl(self.show_url_xml(bugId))
    bugdom = xml.dom.minidom.parseString(bugxml)
    try:
        bugset = bugdom.getElementsByTagName('bug')[0]
    except IndexError:
        # narrowed from a bare except: only "no <bug> element" should map
        # to BugTrackerNotFound; anything else should propagate
        raise BugTrackerNotFound('could not find bug tag')
    # xml tag name -> friendly key
    bugget = {
        'creation_ts': 'created',
        'product': 'product',
        'component': 'component',
        'version': 'version',
        'bug_status': 'status',
        'resolution': 'resolution',
        'priority': 'priority',
        'bug_severity': 'severity',
        'reporter': 'reporter',
        'short_desc': 'description',
        'assigned_to': 'assigned to'
    }
    data = {}
    for tag in bugget.keys():
        try:
            value = bugset.getElementsByTagName(
                tag)[0].firstChild.nodeValue
            data[bugget[tag]] = value
        except (IndexError, AttributeError):
            # narrowed from bare except: missing tag (IndexError) or an
            # empty tag whose firstChild is None (AttributeError)
            pass
    return data
def fetch_credentials(self):
    """On the front page, a username and password are generated and have a
    limited validity."""
    page = geturl('http://www.freetranslation.com')
    user_match = self.re_username.search(page)
    pass_match = self.re_password.search(page)
    # only hand back credentials when both pieces were found
    if not (user_match and pass_match):
        return {}
    return {
        'username': user_match.group(1),
        'password': pass_match.group(1),
    }
def handle_sc(bot, ievent):
    """ sc <server> .. fetch shoutcast /7.html data from <server>. """
    try:
        server = ievent.args[0]
    except IndexError:
        ievent.missing('<server>')
        return
    try:
        result = geturl('http://%s/7.html' % server)
    except Exception as ex:  # was "except Exception, ex" (py3-incompatible)
        ievent.reply("can't get shoutcast data: %s" % str(ex))
        return
    # NOTE(review): `result` is fetched but never used here — this looks
    # truncated; confirm upstream whether the parse/reply code is missing.
def geturl_validate(url):
    """ validate url """
    # urlvalidate is a module-level format string with one %s slot for the
    # urlencoded query built here
    url = urlvalidate % urllib.urlencode({'uri': url})
    try:
        result = geturl(url)
    except IOError, ex:
        try:
            # urllib-raised IOErrors carry the errno as their first element
            errno = ex[0]
        except IndexError:
            # unexpected IOError shape: log the traceback and bail out
            handle_exception()
            return
        return False
    # NOTE(review): the success path falls through here returning None and
    # `result`/`errno` are never used — this block looks truncated; confirm
    # against upstream before relying on the return value.
def getwikidata(url, ievent):
    """ fetch wiki data """
    try:
        # geturl fetches the page; fromenc decodes it from the configured
        # encoding
        result = fromenc(geturl(url))
    except IOError, ex:
        try:
            # urllib-raised IOErrors carry the errno as their first element
            errno = ex[0]
        except IndexError:
            # unexpected IOError shape: log it (with the triggering event)
            handle_exception(ievent=ievent)
            return
        ievent.reply('invalid option')
        return
    # NOTE(review): the success path falls through here returning None and
    # `result`/`errno` are never used — this block looks truncated; confirm
    # against upstream.
def list(self):
    """ Return the bug ids of all entries in the tracker's feed.

        Each <entry>'s <id> text is split on ':' and the third field is
        taken as the bug id; malformed entries are skipped.
    """
    bugrss = geturl(self.list_url())
    bugdom = xml.dom.minidom.parseString(bugrss)
    bugall = bugdom.getElementsByTagName('entry')
    bugs = []
    if bugall:
        for entry in bugall:
            try:
                bugid = entry.getElementsByTagName('id')[0].firstChild.nodeValue.split(':')[2]
                bugs.append(bugid)
            except (IndexError, AttributeError):
                # narrowed from bare except: missing <id>, empty text node,
                # or fewer than three ':'-separated fields
                pass
    return bugs
def dowebtest(nrloop):
    """ Fire rounds of shuffled example commands at the local dispatch URL.

        nrloop is the number of rounds; note the counter is decremented
        before the check, so nrloop=1 runs zero rounds and nrloop<=0 never
        terminates — presumably callers pass values > 1.
    """
    a = examples.getexamples()
    teller = 0
    while 1:
        nrloop -= 1
        if nrloop == 0:
            break
        random.shuffle(a)
        for z in a:
            teller += 1
            # flag examples containing any blacklisted substring from donot
            no = 0
            for zz in donot:
                if z.find(zz) != -1:
                    no = 1
                    break
            # NOTE(review): `no` is computed but never consulted — an
            # "if no: continue" seems to be missing; behavior kept as-is.
            print(z)  # parenthesized print: valid in py2 (single arg) and py3
            try:
                print(geturl('http://localhost:8088/dispatch?command=%s' % z))
            except IOError:
                pass
            except:
                # any other failure (including KeyboardInterrupt) hard-kills
                # the test process; deliberate, so the bare except stays
                os._exit(0)
def handle_mash(bot, ievent):
    """ query the mash service for ievent.rest and reply with the results. """
    if not ievent.rest:
        ievent.missing('<what>')
        return
    query = '+'.join(ievent.rest.split())
    data = geturl(baseurl + query)
    try:
        results = loads(data)
    except ValueError:
        ievent.reply("can't make results of %s" % data)
        return
    # one "title: - <url>" entry per result, html-stripped
    res = ['%s: - <%s>' % (striphtml(item['title']), item['url'])
           for item in results['results']]
    ievent.reply('results for %s: ' % ievent.rest, res, dot=' || ')
def list(self):
    """ Return the bug ids of all entries in the tracker's feed.

        The third ':'-separated field of each <entry>'s <id> text is the
        bug id; entries without a well-formed <id> are skipped.
    """
    bugrss = geturl(self.list_url())
    bugdom = xml.dom.minidom.parseString(bugrss)
    bugall = bugdom.getElementsByTagName('entry')
    bugs = []
    if bugall:
        for entry in bugall:
            try:
                bugid = entry.getElementsByTagName(
                    'id')[0].firstChild.nodeValue.split(':')[2]
                bugs.append(bugid)
            except (IndexError, AttributeError):
                # narrowed from bare except: missing <id>, empty text node,
                # or too few ':' fields
                pass
    return bugs
def markovlearnurl(url):
    """ Learn markov data from the page at *url*.

        Fetches the page, strips html per line and feeds every non-empty
        line to markovtalk_learn; any failure is logged, not raised.
    """
    lines = 0
    rlog(10, 'markov', 'learning %s' % url)
    try:
        f = geturl(url)
        for line in f.split('\n'):
            line = striphtml(line)
            # brief pause every 10 learned lines so learning doesn't hog
            # the process
            if lines % 10 == 0:
                time.sleep(0.01)
            line = line.strip()
            if not line:
                continue
            markovtalk_learn(line)
            lines += 1
    except Exception as e:  # was "except Exception, e" (py3-incompatible)
        rlog(10, 'markov', str(e))
def getwikidata(url):
    """ Fetch wiki data from *url* and return the cleaned-up text.

        Takes the first rss item that has a 'text' field, strips [[ ]]
        wiki markup and collapses whitespace.  Returns None when the page
        is empty or no item carries text.
    """
    result = fromenc(geturl(url))
    if not result:
        return
    res = rsslist(result)
    txt = ""
    for i in res:
        try:
            txt = i['text']
            break
        except (KeyError, TypeError):
            # narrowed from bare except: item without a 'text' key, or a
            # non-mapping item; keep scanning
            pass
    if not txt:
        return
    txt = txt.replace('[[', '')
    txt = txt.replace(']]', '')
    txt = re.sub(r'\s+', ' ', txt)
    return txt
def show(self, bugId):
    """ Fetch bug *bugId* via the tracker's XML interface.

        Returns a dict of friendly field name -> value; fields missing
        from the XML are omitted.  Raises BugTrackerNotFound when the
        response has no <bug> element.
    """
    assert bugId.isdigit(), "bug id has to be a number"
    bugxml = geturl(self.show_url_xml(bugId))
    bugdom = xml.dom.minidom.parseString(bugxml)
    try:
        bugset = bugdom.getElementsByTagName('bug')[0]
    except IndexError:
        # narrowed from a bare except: only "no <bug> element" maps to
        # BugTrackerNotFound; other errors propagate
        raise BugTrackerNotFound('could not find bug tag')
    # xml tag name -> friendly key
    bugget = {'creation_ts': 'created',
              'product': 'product',
              'component': 'component',
              'version': 'version',
              'bug_status': 'status',
              'resolution': 'resolution',
              'priority': 'priority',
              'bug_severity': 'severity',
              'reporter': 'reporter',
              'short_desc': 'description',
              'assigned_to': 'assigned to'}
    data = {}
    for tag in bugget.keys():
        try:
            value = bugset.getElementsByTagName(tag)[0].firstChild.nodeValue
            data[bugget[tag]] = value
        except (IndexError, AttributeError):
            # narrowed from bare except: missing tag or empty text node
            pass
    return data
def comments(self, bugId):
    """ Return 'author: comment' strings for all comments on bug *bugId*.

        Only feed items whose title mentions 'comment added' are taken;
        authorless items are attributed to 'anonymous'.
    """
    assert bugId.isdigit(), "bug id has to be a number"
    bugrss = geturl(self.comments_url(bugId))
    bugdom = xml.dom.minidom.parseString(bugrss)
    bugall = bugdom.getElementsByTagName('item')
    comments = []
    if bugall:
        for item in bugall:
            title = item.getElementsByTagName('title')[0].firstChild.nodeValue
            if 'comment added' not in title:
                continue
            try:
                author = item.getElementsByTagName('dc:creator')[0].firstChild.nodeValue
            except IndexError:
                author = 'anonymous'
            comment = item.getElementsByTagName('description')[0].firstChild.nodeValue
            comment = striphtml(comment.replace('\n', ' ')).strip()
            # collapse runs of blanks to single spaces; as written the loop
            # replaced ' ' with ' ' — a no-op whose condition never turns
            # false once any space is present, i.e. an infinite loop
            while '  ' in comment:
                comment = comment.replace('  ', ' ')
            comments.append('%s: %s' % (author, comment))
    return comments
def handle_wikiquote(bot, ievent):
    """ wikiquote <what> .. search wikiquote for <what> """
    if not ievent.rest:
        ievent.missing('<what>')
        return
    lang = 'en'
    words = []
    for token in ievent.rest.split():
        if token.startswith('-'):
            # language option must look like "-en"
            if len(token) != 3:
                ievent.reply('invalid option')
                return
            lang = token[1:]
        else:
            # capitalize each word for the wiki title
            words.append(token[0].upper() + token[1:])
    what = '_'.join(words)
    url = 'http://%s.wikiquote.org/w/wiki.phtml?title=%s' % (lang, what)
    result = geturl(url)
    if not result:
        ievent.reply("can't find data for %s" % what)
        return
    result = re.sub('\s+', ' ', result.replace('\n', ' '))
    regresult = re.search(wikire, result)
    if not regresult:
        ievent.reply("can't match regular expression %s" % url)
        return
    txt = regresult.groups()[0]
    items = re.findall('<li>(.*?)</li>', txt)
    try:
        quote = striphtml(random.choice(items))
    except IndexError:
        ievent.reply("can't find quote")
        return
    ievent.reply(re.sub('\[.*?\]', '', quote))
def comments(self, bugId):
    """ Return 'author: comment' strings for all comments on bug *bugId*.

        Only feed items whose title mentions 'comment added' are used;
        items without a dc:creator are attributed to 'anonymous'.
    """
    assert bugId.isdigit(), "bug id has to be a number"
    bugrss = geturl(self.comments_url(bugId))
    bugdom = xml.dom.minidom.parseString(bugrss)
    bugall = bugdom.getElementsByTagName('item')
    comments = []
    if bugall:
        for item in bugall:
            title = item.getElementsByTagName(
                'title')[0].firstChild.nodeValue
            if 'comment added' in title:
                try:
                    author = item.getElementsByTagName(
                        'dc:creator')[0].firstChild.nodeValue
                except IndexError:
                    author = 'anonymous'
                comment = item.getElementsByTagName(
                    'description')[0].firstChild.nodeValue
                comment = striphtml(comment.replace('\n', ' ')).strip()
                # squeeze double spaces down to single ones; the original
                # replaced ' ' with ' ', which never changes the string and
                # loops forever once a space exists
                while '  ' in comment:
                    comment = comment.replace('  ', ' ')
                comments.append('%s: %s' % (author, comment))
    return comments
def handle_wikiquote(bot, ievent):
    """ wikiquote <what> .. search wikiquote for <what> """
    if not ievent.rest:
        ievent.missing('<what>')
        return
    # parse the arguments: "-xx" tokens select the language, everything
    # else is capitalized and joined into the page title
    lang = 'en'
    title = ""
    for arg in ievent.rest.split():
        if arg.startswith('-'):
            if len(arg) != 3:
                ievent.reply('invalid option')
                return
            lang = arg[1:]
        else:
            title += "%s%s " % (arg[0].upper(), arg[1:])
    what = title.strip().replace(' ', '_')
    url = 'http://%s.wikiquote.org/w/wiki.phtml?title=%s' % (lang, what)
    result = geturl(url)
    if not result:
        ievent.reply("can't find data for %s" % what)
        return
    # flatten the page into one whitespace-normalized string
    flat = re.sub('\s+', ' ', result.replace('\n', ' '))
    match = re.search(wikire, flat)
    if match is None:
        ievent.reply("can't match regular expression %s" % url)
        return
    quotes = re.findall('<li>(.*?)</li>', match.groups()[0])
    try:
        picked = random.choice(quotes)
    except IndexError:
        ievent.reply("can't find quote")
        return
    ievent.reply(re.sub('\[.*?\]', '', striphtml(picked)))
def _get(self, app, parameters):
    """ Fetch the service url built for (app, parameters) and return the
        parsed response. """
    raw = geturl(self._url(app, parameters))
    return self._parse(app, raw)
def handle_weather(bot, ievent):
    """ show weather using Google's weather API """
    userhost = ""
    loc = ""
    try:
        # --u <nick>: look the location up in that user's saved userstate
        nick = ievent.options['--u']
        if nick:
            userhost = getwho(bot, nick)
            if not userhost:
                ievent.reply("can't determine username for %s" % nick)
                return
            else:
                try:
                    name = users.getname(userhost)
                    if not name:
                        ievent.reply("%s is not known with the bot" % nick)
                        return
                    us = UserState(name)
                    loc = us['location']
                except KeyError:
                    # NOTE: the original string continuation embeds the
                    # source indentation in the message; kept verbatim
                    ievent.reply("%s doesn't have his location set in \
userstate" % nick)
                    return
    except KeyError:
        # no --u option given at all
        pass
    if not loc:
        if ievent.rest:
            loc = ievent.rest
        else:
            ievent.missing('--u <nick> or <location>')
            return
    query = urlencode({'weather':loc})
    weathertxt = geturl('http://www.google.ca/ig/api?%s' % query)
    if 'problem_cause' in weathertxt:
        rlog(10, 'weather', 'ERROR: %s' % weathertxt)
        ievent.reply('an error occured looking up data for %s' % loc)
        return
    resultstr = ""
    # responses shorter than ~135 chars carry no weather payload
    if len(weathertxt) > 135:
        gweather = minidom.parseString(weathertxt)
        gweather = gweather.getElementsByTagName('weather')[0]
        if ievent.command == "weather":
            # current conditions branch
            info = gweather.getElementsByTagName('forecast_information')[0]
            if info:
                # NOTE: `zip` and `time` shadow the builtin/module names
                # within this function; kept as-is
                city = info.getElementsByTagName('city')[0].attributes["data"].value
                zip = info.getElementsByTagName('postal_code')[0].attributes["data"].value
                time = info.getElementsByTagName('current_date_time')[0].attributes["data"].value
                weather = gweather.getElementsByTagName('current_conditions')[0]
                condition = weather.getElementsByTagName('condition')[0].attributes["data"].value
                temp_f = weather.getElementsByTagName('temp_f')[0].attributes["data"].value
                temp_c = weather.getElementsByTagName('temp_c')[0].attributes["data"].value
                humidity = weather.getElementsByTagName('humidity')[0].attributes["data"].value
                wind = weather.getElementsByTagName('wind_condition')[0].attributes["data"].value
                try:
                    # crude mph -> km/h: assumes the mph figure sits at
                    # wind[-6:-4] — TODO confirm against the API's format
                    wind_km = round(int(wind[-6:-4]) * 1.609344)
                except ValueError:
                    wind_km = ""
                if (not condition == ""):
                    condition = " Oh, and it's " + condition + "."
                resultstr = "As of %s, %s (%s) has a temperature of %sC/%sF with %s. %s (%s km/h).%s" % (time, city, zip, temp_c, temp_f, humidity, wind, wind_km, condition)
        elif ievent.command == "forecast":
            # multi-day forecast branch: one bracketed segment per day
            forecasts = gweather.getElementsByTagName('forecast_conditions')
            for forecast in forecasts:
                condition = forecast.getElementsByTagName('condition')[0].attributes["data"].value
                low_f = forecast.getElementsByTagName('low')[0].attributes["data"].value
                high_f = forecast.getElementsByTagName('high')[0].attributes["data"].value
                day = forecast.getElementsByTagName('day_of_week')[0].attributes["data"].value
                # fahrenheit -> celsius
                low_c = round((int(low_f) - 32) * 5.0 / 9.0)
                high_c = round((int(high_f) - 32) * 5.0 / 9.0)
                resultstr += "[%s: F(%sl/%sh) C(%sl/%sh) %s]" % (day, low_f, high_f, low_c, high_c, condition)
    if not resultstr:
        ievent.reply('%s not found!' % loc)
        return
    else:
        ievent.reply(resultstr)