def get_tinyurl(url):
    """Return a list of shortened urls for *url*, using the tinyurl service.

    Results are cached as a JSON list under the 'tinyurl' namespace; a cache
    hit is returned directly. On any request failure the error is logged and
    None is returned (best-effort semantics).
    """
    res = get(url, namespace='tinyurl')
    logging.debug('tinyurl - cache - %s' % str(res))
    # cached value is a JSON-encoded list -> starts with '['
    if res and res[0] == '[':
        return json.loads(res)
    postarray = [
        ('submit', 'submit'),
        ('url', url),
    ]
    postdata = urllib.parse.urlencode(postarray)
    # plugcfg.url is the configured tinyurl endpoint; POST data must be bytes
    req = urllib.request.Request(url=plugcfg.url, data=bytes(postdata, "utf-8"))
    req.add_header('User-agent', useragent())
    try:
        res = urllib.request.urlopen(req).readlines()
    # HTTPError is a subclass of URLError, so it must be caught FIRST —
    # the original order made this clause unreachable.
    except urllib.error.HTTPError as e:
        logging.warning('tinyurl - %s - HTTP error: %s' % (url, str(e)))
        return
    except urllib.error.URLError as e:
        logging.warning('tinyurl - %s - URLError: %s' % (url, str(e)))
        return
    except Exception as ex:
        if "DownloadError" in str(ex):
            # fixed: original logged str(e), which is unbound in this branch
            logging.warning('tinyurl - %s - DownloadError: %s' % (url, str(ex)))
        else:
            handle_exception()
        return
    urls = []
    for line in res:
        l = str(line, "utf-8")
        # the shortened urls appear inside <blockquote><b> markup;
        # strip tags and drop the trailing "[Open" link text
        if l.startswith('<blockquote><b>'):
            urls.append(striphtml(l.strip()).split('[Open')[0])
    # the page echoes the submitted url as the first of three entries; drop it
    if len(urls) == 3:
        urls.pop(0)
    set(url, json.dumps(urls), namespace='tinyurl')
    return urls
def fetchdata(self):
    """Fetch and parse this feed's url, returning the list of entries.

    Returns [] when no url is configured. Raises RssStatus when the HTTP
    status is anything other than 200/301/302.
    """
    feedurl = self.data['url']
    if not feedurl:
        logging.warn("%s doesnt have url set" % self.data.name)
        return []
    parsed = feedparser.parse(feedurl, agent=useragent())
    logging.debug("got result from %s" % feedurl)
    # feedparser reports parse trouble via a bozo_exception entry
    if parsed and 'bozo_exception' in parsed:
        logging.info('%s bozo_exception: %s' % (feedurl, parsed['bozo_exception']))
    # some results (e.g. local parses) carry no status; treat those as 200
    status = getattr(parsed, 'status', None)
    if status is None:
        status = 200
    else:
        logging.info("status is %s" % status)
    if status not in (200, 301, 302):
        raise RssStatus(status)
    return parsed.entries
def show(self, bugId):
    """Fetch the issue feed for *bugId* and return its fields as a dict.

    Only the keys listed in wantlist are copied from the first feed entry;
    missing keys are skipped. Returns {} when the feed has no entries.
    """
    # fixed typo in the assertion message ("ba" -> "be")
    assert bugId.isdigit(), "bug id has to be a number"
    # https://code.google.com/feeds/issues/p/googleappengine/
    feed = feedparser.parse(self.show_url(bugId), agent=useragent())
    if not feed.entries:
        return {}
    """[u'issues_label', 'updated_parsed', 'links', u'issues_owner', u'issues_closeddate', 'href', u'issues_status', 'id', u'issues_uri', 'published_parsed', 'title', u'issues_id', u'issues_stars', 'content', 'title_detail', u'issues_state', 'updated', 'link', 'authors', 'author_detail', 'author', u'issues_username', 'summary', 'published']"""
    wantlist = ['issues_label', 'issues_owner', 'issues_closeddate',
                'issues_status', 'issues_uri', 'title', 'issues_id',
                'issues_stars', 'issues_state', 'updated', 'link', 'author',
                'issues_username', 'published']
    data = feed.entries[0]
    res = {}
    for name in wantlist:
        # EAFP: not every entry carries every field
        try:
            res[name] = data[name]
        except KeyError:
            continue
    return res
def get_tinyurl(url):
    """Return a list of shortened urls for *url*, using the tinyurl service.

    Raises URLNotEnabled when the url plugin is disabled. Results are cached
    as a JSON list under the 'tinyurl' namespace; a cache hit is returned
    directly. On any request failure the error is logged and None is
    returned (best-effort semantics).
    """
    from tl.utils.url import enabled
    if not enabled:
        raise URLNotEnabled
    res = get(url, namespace="tinyurl")
    logging.debug("tinyurl - cache - %s" % str(res))
    # cached value is a JSON-encoded list -> starts with '['
    if res and res[0] == "[":
        return json.loads(res)
    postarray = [("submit", "submit"), ("url", url)]
    postdata = urllib.parse.urlencode(postarray)
    postbytes = bytes(postdata, "utf-8")
    # posturl is the configured tinyurl endpoint; POST data must be bytes
    req = urllib.request.Request(url=posturl, data=postbytes)
    req.add_header("User-agent", useragent())
    try:
        res = urllib.request.urlopen(req).readlines()
    # HTTPError is a subclass of URLError, so it must be caught FIRST —
    # the original order made this clause unreachable.
    except urllib.error.HTTPError as e:
        logging.warning("tinyurl - %s - HTTP error: %s" % (url, str(e)))
        return
    except urllib.error.URLError as e:
        logging.warning("tinyurl - %s - URLError: %s" % (url, str(e)))
        return
    except Exception as ex:
        if "DownloadError" in str(ex):
            # fixed: original logged str(e), which is unbound in this branch
            logging.warning("tinyurl - %s - DownloadError: %s" % (url, str(ex)))
        else:
            handle_exception()
        return
    urls = []
    for line in res:
        bline = str(line, "utf-8")
        # the shortened urls appear inside <blockquote><b> markup;
        # strip tags and drop the trailing "[Open" link text
        if bline.startswith("<blockquote><b>"):
            urls.append(striphtml(bline.strip()).split("[Open")[0])
    # the page echoes the submitted url as the first of three entries; drop it
    if len(urls) == 3:
        urls.pop(0)
    set(url, json.dumps(urls), namespace="tinyurl")
    return urls
def close(self, bugId, reporter, message, action='fixed'):
    """Resolve bug *bugId* with the given *action*, posting *message* as comment.

    reporter .. name recorded on the resolution
    message  .. comment text
    action   .. one of fixed/invalid/wontfix/duplicate/worksforme
    Returns the raw response body of the ticket-update POST.
    """
    actions = ['fixed', 'invalid', 'wontfix', 'duplicate', 'worksforme']
    # input check
    assert bugId.isdigit(), "bug id has to be a number"
    assert action in actions, "action has to be one of: %s" % ', '.join(actions)
    # NOTE(review): assumes self.show() returns summary/priority/milestone/
    # component/version/keywords for this tracker — verify against the
    # tracker's show() implementation.
    showdata = self.show(bugId)
    postdata = {
        'reporter': reporter,
        'comment': message,
        'action': 'resolve',
        'type': 'defect',
        'resolve_resolution': action,
        'summary': showdata['summary'],
        'priority': showdata['priority'],
        'milestone': showdata['milestone'],
        'component': showdata['component'],
        'version': showdata['version'],
        'keywords': showdata['keywords'],
        'cc': '',
    }
    postdata = urllib.parse.urlencode(postdata)
    # fixed: Request data must be bytes in Python 3 — the original passed a
    # str, which raises TypeError; encode like the other POSTs in this file.
    req = urllib.request.Request('%s/ticket/%s' % (self.url, bugId),
                                 data=bytes(postdata, "utf-8"))
    req.add_header('User-agent', useragent())
    return urllib.request.urlopen(req).read()
def list(self):
    """Parse the bug-list feed and return its entries."""
    feed = feedparser.parse(self.list_url(), agent=useragent())
    return feed.entries