def retrieving_issues_comments_for_an_issue(self, client, project_name, issue_id):
    """Retrieve all issue comments for an issue.

    Args:
        client: project-hosting client exposing get_comments().
        project_name: name of the project whose issue is queried.
        issue_id: identifier of the issue whose comments are fetched.

    Returns:
        The comments feed returned by the client, after asserting that
        every comment in the feed has a non-None content element.
    """
    comments_feed = client.get_comments(project_name, issue_id)
    for comment in comments_feed.entry:
        # assert_ is a deprecated unittest alias (removed in Python 3.12);
        # use assertTrue, matching the sibling implementation in this file.
        self.assertTrue(comment.content is not None)
    return comments_feed
def retrieving_issues_comments_for_an_issue(self, client, project_name, issue_id):
    """Fetch the comment feed for one issue, checking each comment has content.

    Returns the feed unchanged so callers can inspect the entries themselves.
    """
    feed = client.get_comments(project_name, issue_id)
    for entry in feed.entry:
        # Every fetched comment is expected to carry a content element.
        self.assertTrue(entry.content is not None)
    return feed
def get_comments_for_issue(client, project_name, issue_id):
    """Collect (author, text, published) triples for every comment on an issue.

    issue_id may be a full URL; only its trailing path segment is used.
    Comments whose content is empty are skipped.  When a comment lists
    several authors, only the last one is kept.
    """
    short_id = issue_id.split('/')[-1]
    # Oversized page so a single request returns the entire comment feed.
    query = gdata.projecthosting.client.Query(max_results=1024 * 1024)
    feed = client.get_comments(project_name, short_id, query=query)
    results = []
    for entry in feed.entry:
        last_author = None
        for person in entry.author:
            last_author = person.name.text
        text = entry.content.text
        if text:
            results.append((last_author, text, entry.published.text))
    return results
def get_comments_for_issue(client, project_name, issue_id):
    """Return (author, content, published) triples for each non-empty comment.

    The trailing path segment of issue_id is used as the numeric id, so a
    full issue URL is accepted as well.
    """
    issue_id = issue_id.split('/')[-1]
    # Request an effectively unbounded page so no comments are paginated away.
    query = gdata.projecthosting.client.Query(max_results=1024 * 1024)
    feed = client.get_comments(project_name, issue_id, query=query)
    triples = []
    for comment in feed.entry:
        # A comment may list several authors; keep only the last one.
        author_name = comment.author[-1].name.text if comment.author else None
        if comment.content.text:
            triples.append(
                (author_name, comment.content.text, comment.published.text))
    return triples
def get_all_issues(client, db): logging.info("Collecting issues...") next_retry = 0 same_retry = 0 cursor = db.cursor() i = 1 while True: # Do we already have info about this issue? cursor.execute("SELECT COUNT(*) FROM %s where ROWID=?" % (conf["project"],), (i,)) if cursor.fetchall()[0][0] > 0: logging.debug("Issue %s exists in cache. Skipping." % (i,)) i += 1 continue # Get the issue from Gcode query = gdata.projecthosting.client.Query(issue_id=i, max_results=1) try: feed = client.get_issues(conf["project"], query=query) comments_feed = client.get_comments(conf["project"], i) except gdata.client.RequestError, e: if re.match("Server responded with: (403|404)", e.message): # this issue is inaccessible, try the next one logging.warning(e.message) if next_retry > conf["next_retry"]: logging.warning("Issue %i: Giving up." % (i,)) break else: logging.warning("Issue %i: Skipping." % (i,)) i += 1 next_retry += 1 continue elif re.match("Server responded with: (500)", e.message): # try this issue again if same_retry > conf["same_retry"]: logging.warning(e.message) logging.warning("Issue %i: Giving up." % (i,)) i += 1 continue else: logging.debug(e.message) logging.debug("Issue %i: Trying again." % (i,)) same_retry += 1 continue else: raise issue = feed.entry[0] closed_date = get_closed_date(comments_feed.entry) if closed_date: logging.debug("Issue %s opened at %s closed at %s" % (i, issue.published.text, closed_date)) cursor.execute("INSERT INTO %s (ROWID,opened,closed) VALUES (?,?,?)" % (conf["project"],), (i, issue.published.text, closed_date)) else: logging.debug("Issue %s opened at %s still open" % (i, issue.published.text)) cursor.execute("INSERT INTO %s (ROWID,opened) VALUES (?,?)" % (conf["project"],), (i, issue.published.text,)) next_retry = 0 same_retry = 0 i += 1