def main(args): optparser = get_optparser() opts, args = optparser.parse_args() def verify_required_option(option): if getattr(opts, option) is None: optparser.error("Missing required option '%s'" % option) verify_required_option('project') verify_required_option('user') verify_required_option('tracdb') print "Loading Trac tickets..." issues = list(load_trac_tickets(opts.tracdb)) populate_issue_comments(opts.tracdb, issues) if opts.list: import pprint pprint.pprint( list((i.google_code_dict, [x.google_code_dict for x in i.comments]) for i in issues if i.comments)) sys.exit(0) client = get_issues_client(opts.user) issues_feed = client.get_issues(opts.project) for issue in issues_feed.entry: print issue.title.text
def retrieving_issues_using_query_parameters(self, client, project_name):
    """Retrieve a set of issues in a project."""
    # Restrict the feed to issues labelled "label0".
    query = gdata.projecthosting.client.Query(label="label0",
                                              max_results=1000)
    feed = client.get_issues(project_name, query=query)
    for entry in feed.entry:
        # Each returned issue must carry a non-empty title.
        self.assert_(entry.title.text is not None)
    return feed
def main(args): optparser = get_optparser() opts, args = optparser.parse_args() def verify_required_option(option): if getattr(opts,option) is None: optparser.error("Missing required option '%s'" % option) verify_required_option('project') verify_required_option('user') verify_required_option('tracdb') print "Loading Trac tickets..." issues = list(load_trac_tickets(opts.tracdb)) populate_issue_comments(opts.tracdb, issues) if opts.list: import pprint pprint.pprint(list((i.google_code_dict, [x.google_code_dict for x in i.comments]) for i in issues if i.comments)) sys.exit(0) client = get_issues_client(opts.user) issues_feed = client.get_issues(opts.project) for issue in issues_feed.entry: print issue.title.text
def retrieving_issues_using_query_parameters(self, client, project_name):
    """Retrieve a set of issues in a project."""
    issue_query = gdata.projecthosting.client.Query(label='label0',
                                                    max_results=1000)
    result_feed = client.get_issues(project_name, query=issue_query)
    # Sanity-check every entry: a fetched issue should have a title.
    for result in result_feed.entry:
        self.assert_(result.title.text is not None)
    return result_feed
def all_open_issues(client, project_name):
    """Retrieve a set of issues in a project.

    Returns a list of IssueEntry objects where the issue is not in
    closed state.
    """
    # Request a very large page so one query covers the whole project.
    query = gdata.projecthosting.client.Query(max_results=1024*1024)
    feed = client.get_issues(project_name, query=query)
    # Keep every entry whose state is anything other than "closed".
    return [issue for issue in feed.entry if issue.state.text != "closed"]
def all_open_issues(client, project_name):
    """Retrieve a set of issues in a project.

    Returns a list of IssueEntry objects where the issue is not in
    closed state.
    """
    # One oversized page instead of paging through the feed.
    query = gdata.projecthosting.client.Query(max_results=1024 * 1024)
    feed = client.get_issues(project_name, query=query)
    open_issues = []
    for entry in feed.entry:
        is_closed = entry.state.text == "closed"
        if not is_closed:
            open_issues.append(entry)
    return open_issues
def all(self): """Retrieve all the issues in a project.""" #data = memcache.get("issues_all") #if data is not None: # return data, True client = gdata.projecthosting.client.ProjectHostingClient() client.client_login( conf.USER_NAME, conf.USER_PASS, source='flightgear-bot', service='code') feed = client.get_issues(conf.GOOGLE_PROJECT) data = [] print sfeed for issue in feed.entry: dic = process_entry(issue) data.append(dic) if not memcache.set("issues_all", data, 60): print "error" return data
def retrieving_all_issues(self, client, project_name):
    """Retrieve all the issues in a project."""
    # No query parameters: fetch the default feed and check every entry.
    for entry in client.get_issues(project_name).entry:
        self.assert_(entry.title.text is not None)
def get_all_issues(client, db):
    """Walk issue ids from 1 upward, caching opened/closed dates in sqlite.

    For each issue id not already present in the per-project table, fetches
    the issue and its comments from the Google Code API and inserts a row
    with the opened date and (when derivable from the comments) the closed
    date. Stops when conf["next_retry"] consecutive inaccessible (403/404)
    issues have been seen, which is taken to mean the end of the project's
    issue range.
    """
    logging.info("Collecting issues...")
    # next_retry: consecutive 403/404 misses; same_retry: repeated 500s on
    # the current issue.
    next_retry = 0
    same_retry = 0
    cursor = db.cursor()
    i = 1
    while True:
        # Do we already have info about this issue?
        cursor.execute("SELECT COUNT(*) FROM %s where ROWID=?" % (conf["project"],), (i,))
        if cursor.fetchall()[0][0] > 0:
            logging.debug("Issue %s exists in cache. Skipping." % (i,))
            i += 1
            continue
        # Get the issue from Gcode
        query = gdata.projecthosting.client.Query(issue_id=i, max_results=1)
        try:
            feed = client.get_issues(conf["project"], query=query)
            comments_feed = client.get_comments(conf["project"], i)
        except gdata.client.RequestError, e:
            if re.match("Server responded with: (403|404)", e.message):
                # this issue is inaccessible, try the next one
                logging.warning(e.message)
                if next_retry > conf["next_retry"]:
                    # Too many consecutive misses: assume we ran off the
                    # end of the project's issues and stop entirely.
                    logging.warning("Issue %i: Giving up." % (i,))
                    break
                else:
                    logging.warning("Issue %i: Skipping." % (i,))
                    i += 1
                    next_retry += 1
                    continue
            elif re.match("Server responded with: (500)", e.message):
                # try this issue again
                if same_retry > conf["same_retry"]:
                    # NOTE(review): same_retry is not reset here, so after one
                    # 500-based give-up later issues give up immediately —
                    # confirm whether that is intended.
                    logging.warning(e.message)
                    logging.warning("Issue %i: Giving up." % (i,))
                    i += 1
                    continue
                else:
                    logging.debug(e.message)
                    logging.debug("Issue %i: Trying again." % (i,))
                    same_retry += 1
                    continue
            else:
                # Unexpected HTTP error: surface it to the caller.
                raise
        issue = feed.entry[0]
        # Derive the closing date from the comment stream, if any.
        closed_date = get_closed_date(comments_feed.entry)
        if closed_date:
            logging.debug("Issue %s opened at %s closed at %s" % (i, issue.published.text, closed_date))
            cursor.execute("INSERT INTO %s (ROWID,opened,closed) VALUES (?,?,?)" % (conf["project"],), (i, issue.published.text, closed_date))
        else:
            logging.debug("Issue %s opened at %s still open" % (i, issue.published.text))
            cursor.execute("INSERT INTO %s (ROWID,opened) VALUES (?,?)" % (conf["project"],), (i, issue.published.text,))
        # Successful fetch: reset both retry counters before the next id.
        next_retry = 0
        same_retry = 0
        i += 1