def show(self, bugId):
    """ fetch a single issue from the google code issue tracker.

        bugId    - issue number as a string of digits
        returns  - dict of the issue's interesting fields, {} when the
                   feed has no entries
        raises   - AssertionError when bugId is not numeric (kept as
                   assert for interface compatibility with callers)
    """
    # BUGFIX: assert message read "has to ba a number"
    assert bugId.isdigit(), "bug id has to be a number"
    # feed urls look like https://code.google.com/feeds/issues/p/<project>/
    feed = feedparser.parse(self.show_url(bugId), agent=useragent())
    # BUGFIX: removed debug "print feed" / "print feed.entries[0].keys()"
    # statements and the dead sample-data string literals that were left
    # in as expression statements.
    if not feed.entries:
        return {}
    # entry keys we care about; the rest (title_detail, *_parsed, links,
    # summary, content, ...) is feedparser bookkeeping.
    wantlist = [u'issues_label', u'issues_owner', u'issues_closeddate',
                u'issues_status', u'issues_uri', 'title', u'issues_id',
                u'issues_stars', u'issues_state', 'updated', 'link',
                'author', u'issues_username', 'published']
    data = feed.entries[0]
    res = {}
    for name in wantlist:
        try:
            res[name] = data[name]
        except KeyError:
            # not every issue carries every field - skip what is missing
            continue
    return res
def close(self, bugId, reporter, message, action='fixed'):
    """ resolve a ticket via the trac web form.

        bugId    - ticket number as a string of digits
        reporter - name posted as the resolving reporter
        message  - comment text added to the ticket
        action   - resolution, one of the values in "valid" below
        returns  - the raw response body of the POST
    """
    valid = ['fixed', 'invalid', 'wontfix', 'duplicate', 'worksforme']
    # input check
    assert bugId.isdigit(), "bug id has to be a number"
    assert action in valid, "action has to be one of: %s" % ', '.join(valid)
    # current ticket data is needed because trac's form requires all
    # fields to be re-submitted
    current = self.show(bugId)
    form = {
        'reporter': reporter,
        'comment': message,
        'action': 'resolve',
        'type': 'defect',
        'resolve_resolution': action,
        'cc': '',
    }
    for field in ('summary', 'priority', 'milestone', 'component',
                  'version', 'keywords'):
        form[field] = current[field]
    body = urllib.urlencode(form)
    request = urllib2.Request('%s/ticket/%s' % (self.url, bugId), data=body)
    request.add_header('User-agent', useragent())
    return urllib2.urlopen(request).read()
def fetchdata(self):
    """ get data of rss feed.

        returns  - the feed's entries, or [] when no url is configured
        raises   - RssStatus on an unexpected HTTP status
    """
    url = self.data['url']
    if not url:
        logging.warn("hubbub - %s doesnt have url set" % self.data.name)
        return []
    result = feedparser.parse(url, agent=useragent())
    logging.debug("hubbub - fetch - got result from %s" % url)
    # BUGFIX/idiom: dict.has_key() is deprecated - use the "in" operator
    if result and 'bozo_exception' in result:
        logging.info('hubbub - %s bozo_exception: %s' % (url, result['bozo_exception']))
    try:
        status = result.status
        logging.info("hubbub - status is %s" % status)
    except AttributeError:
        # parsed strings / local files carry no http status - assume ok
        status = 200
    # accept ok plus permanent/temporary redirects
    if status not in (200, 301, 302):
        raise RssStatus(status)
    return result.entries
def get_tinyurl(url): """ grab a tinyurl. """ res = get(url, namespace='tinyurl') logging.debug('tinyurl - cache - %s' % unicode(res)) if res and res[0] == '[': return json.loads(res) postarray = [ ('submit', 'submit'), ('url', url), ] postdata = urllib.urlencode(postarray) req = urllib2.Request(url=plugcfg.url, data=postdata) req.add_header('User-agent', useragent()) try: res = urllib2.urlopen(req).readlines() except urllib2.URLError, e: logging.warn('tinyurl - %s - URLError: %s' % (url, str(e))) return
def get_tinyurl(url): """ grab a tinyurl. """ res = get(url, namespace='tinyurl') ; logging.debug('tinyurl - cache - %s' % unicode(res)) if res and res[0] == '[': return json.loads(res) postarray = [ ('submit', 'submit'), ('url', url), ] postdata = urllib.urlencode(postarray) req = urllib2.Request(url=plugcfg.url, data=postdata) req.add_header('User-agent', useragent()) try: res = urllib2.urlopen(req).readlines() except urllib2.URLError, e: logging.warn('tinyurl - %s - URLError: %s' % (url, str(e))) ; return except urllib2.HTTPError, e: logging.warn('tinyurl - %s - HTTP error: %s' % (url, str(e))) ; return except Exception, ex: if "DownloadError" in str(ex): logging.warn('tinyurl - %s - DownloadError: %s' % (url, str(e))) else: handle_exception() return
def handle_gcalc(bot, ievent): """ arguments: <expression> - use google calc. """ if len(ievent.args) > 0: expr = " ".join(ievent.args).replace("+", "%2B").replace(" ", "+") else: ievent.missing('Missing an expression') return req = urllib2.Request( "http://www.google.com/ig/calculator?hl=en&q=%s" % expr, None, {'User-agent': useragent()}) data = urllib2.urlopen(req).read() try: rhs = data.split("rhs")[1].split("\"")[1] lhs = data.split("lhs")[1].split("\"")[1] if rhs and lhs: ievent.reply( "%s = %s" % (lhs, rhs.replace('\\x26#215;', '*').replace( '\\x3csup\\x3e', '**').replace('\\x3c/sup\\x3e', ''))) else: ievent.reply("hmmm can't get a result ..") except Exception, ex: ievent.reply(str(ex))
def close(self, bugId, reporter, message, action='fixed'):
    """ close a ticket by posting trac's resolve form.

        bugId    - ticket number as a string of digits
        reporter - name posted as the resolving reporter
        message  - comment text added to the ticket
        action   - resolution to apply
        returns  - the raw response body of the POST
    """
    actions = ['fixed', 'invalid', 'wontfix', 'duplicate', 'worksforme']
    # input check
    assert bugId.isdigit(), "bug id has to be a number"
    assert action in actions, "action has to be one of: %s" % ', '.join(actions)
    # trac requires every field to be re-submitted, so pull the
    # ticket's current values first
    ticket = self.show(bugId)
    payload = urllib.urlencode({
        'reporter': reporter,
        'comment': message,
        'action': 'resolve',
        'type': 'defect',
        'resolve_resolution': action,
        'summary': ticket['summary'],
        'priority': ticket['priority'],
        'milestone': ticket['milestone'],
        'component': ticket['component'],
        'version': ticket['version'],
        'keywords': ticket['keywords'],
        'cc': '',
    })
    req = urllib2.Request('%s/ticket/%s' % (self.url, bugId), data=payload)
    req.add_header('User-agent', useragent())
    return urllib2.urlopen(req).read()
def show(self, bugId):
    """ fetch a single issue from the google code issue tracker.

        bugId    - issue number as a string of digits
        returns  - dict of the issue's interesting fields, {} when the
                   feed has no entries
        raises   - AssertionError when bugId is not numeric (kept as
                   assert for interface compatibility with callers)
    """
    # BUGFIX: assert message read "has to ba a number"
    assert bugId.isdigit(), "bug id has to be a number"
    # feed urls look like https://code.google.com/feeds/issues/p/<project>/
    feed = feedparser.parse(self.show_url(bugId), agent=useragent())
    # BUGFIX: dropped the debug "print" statements and the dead
    # sample-data string literals that were left in as expression
    # statements.
    if not feed.entries:
        return {}
    # entry keys we keep; everything else (title_detail, *_parsed, links,
    # summary, content, ...) is feedparser bookkeeping.
    wantlist = [
        u'issues_label', u'issues_owner', u'issues_closeddate',
        u'issues_status', u'issues_uri', 'title', u'issues_id',
        u'issues_stars', u'issues_state', 'updated', 'link', 'author',
        u'issues_username', 'published'
    ]
    data = feed.entries[0]
    res = {}
    for name in wantlist:
        try:
            res[name] = data[name]
        except KeyError:
            # not every issue carries every field - skip what is missing
            continue
    return res
def sendpost(self, postdata):
    """ issue a POST request with a form-encoded payload to self.path. """
    headers = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Accept': 'text/plain; text/html',
        'User-Agent': useragent(),
    }
    self.dorequest('POST', self.path, postdata, headers)
def handle_gcalc(bot, ievent): """ arguments: <expression> - use google calc. """ if len(ievent.args) > 0: expr = " ".join(ievent.args).replace("+", "%2B").replace(" ", "+") else: ievent.missing('Missing an expression') ; return req = urllib2.Request("http://www.google.com/ig/calculator?hl=en&q=%s" % expr, None, {'User-agent': useragent()}) data = urllib2.urlopen(req).read() try: rhs = data.split("rhs")[1].split("\"")[1] lhs = data.split("lhs")[1].split("\"")[1] if rhs and lhs: ievent.reply("%s = %s" % (lhs,rhs.replace('\\x26#215;', '*').replace('\\x3csup\\x3e', '**').replace('\\x3c/sup\\x3e', ''))) else: ievent.reply("hmmm can't get a result ..") except Exception, ex: ievent.reply(str(ex))
def list(self):
    """ return the entries of the issue-list feed.

        NOTE: the method name shadows the builtin "list" but is part of
        the public interface and cannot be renamed.
    """
    parsed = feedparser.parse(self.list_url(), agent=useragent())
    return parsed.entries