                parser.parse(serverUrl + query + bug_key + "/" + bug_key + ".xml")
                issue = handler.getIssues(self.conn)[0]
                bugsdb.insert_issue(issue, dbtrk.id)
            except Exception, e:
                # printerr(e)
                print(e)
        else:
            self.last_mod_date = bugsdb.get_last_modification_date(
                tracker_id=dbtrk.id)
            if self.last_mod_date:
                # self.url = self.url + "&updated:after=" + last_mod_date
                printdbg("Last bugs cached were modified at: %s" % self.last_mod_date)

            bugs_number = self.bugsNumber(self.url)
            print "Tickets to be retrieved:", str(bugs_number)

            remaining = bugs_number
            while remaining > 0:
                self.analyze_bug_list(self.max_issues, bugs_number - remaining,
                                      bugsdb, dbtrk.id)
                remaining -= self.max_issues
                # print "Remaining time: ", (remaining/issues_per_xml_query)*Config.delay/60, "m", "(", remaining, ")"
                time.sleep(self.delay)

            printout("Done. %s bugs analyzed" % (bugs_number))


Backend.register_backend("jira", JiraBackend)
            for raw_rq in raw_rqs:
                rq = self.get_review_request(raw_rq)

                # Insert review request
                self.db.insert_issue(rq, dbtrk.id)
                nrqs += 1
                time.sleep(self.delay)

            offset += self.max_issues

            printout("Fetching review requests from %s to %s" %
                     (offset, offset + self.max_issues))
            result = self.api_client.review_requests(offset=offset,
                                                     limit=self.max_issues,
                                                     group=self.group,
                                                     last_date=last_mod_date)
            raw_rqs = result['review_requests']

        printout("Done. %s review requests analyzed from %s" % (nrqs, total_rqs))

    def run(self):
        printout("Running Bicho - url: %s" % self.url)

        try:
            self.fetch_and_store()
        except (requests.exceptions.HTTPError, ReviewBoardAPIError), e:
            printerr("Error: %s" % e)
            sys.exit(1)


Backend.register_backend('reviewboard', ReviewBoard)
        if last_mod_date:
            printdbg("Last modification date stored: %s" % last_mod_date)

        trac_tickets = self.trac_rpc.tickets(last_mod_date)

        for ticket_id in trac_tickets:
            printdbg("Fetching ticket %s" % str(ticket_id))

            ticket = self.trac_rpc.ticket(ticket_id)
            issue = self.get_issue_from_ticket(ticket)

            # Insert issue
            self.db.insert_issue(issue, dbtrk.id)
            nbugs += 1
            time.sleep(self.delay)

        printout("Done. %s bugs analyzed from %s" % (nbugs, len(trac_tickets)))

    def run(self):
        printout("Running Bicho with delay of %s seconds - %s" % (self.delay, self.url))

        try:
            self.fetch_and_store_tickets()
        except (requests.exceptions.HTTPError, TracRPCError), e:
            printerr("Error: %s" % e)
            sys.exit(1)


Backend.register_backend('trac', Trac)
            contents = f.read()
        except Exception:
            printerr("Error retrieving URL: %s" % (bugs_url))
            raise

        try:
            parser.feed(contents)
            parser.close()
        except Exception:
            # Clean only the invalid XML
            try:
                parser2 = xml.sax.make_parser()
                parser2.setContentHandler(handler)
                printdbg("Cleaning dirty XML")
                cleaned_contents = ''.join(
                    c for c in contents if valid_XML_char_ordinal(ord(c)))
                parser2.feed(cleaned_contents)
                parser2.close()
            except Exception:
                printerr("Error parsing URL: %s" % (bugs_url))
                raise

        f.close()

    def _timestamp_to_str(self, ts):
        if not ts:
            return None

        return ts.strftime('%Y-%m-%d %H:%M:%S')


Backend.register_backend("bg", BGBackend)
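# The cleaning step above relies on valid_XML_char_ordinal(), which is defined
# elsewhere in the backend's utilities. A minimal sketch of what such a helper
# needs to do, assuming it only has to accept the character ranges that XML 1.0
# allows:
def valid_XML_char_ordinal(i):
    """Return True if the code point i is a legal XML 1.0 character."""
    return (0x20 <= i <= 0xD7FF
            or i in (0x9, 0xA, 0xD)
            or 0xE000 <= i <= 0xFFFD
            or 0x10000 <= i <= 0x10FFFF)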
                issue = jira.issue(bug_key, expand='changelog')
                self.analyze_bug_list(issue, self.serverUrl + '/browse/',
                                      bugsdb, dbtrk.id)
            except Exception, e:
                # printerr(e)
                print(e)
        else:
            self.last_mod_date = bugsdb.get_last_modification_date(
                tracker_id=dbtrk.id)
            if self.last_mod_date:
                # self.url = self.url + "&updated:after=" + last_mod_date
                printdbg("Last bugs cached were modified at: %s" % self.last_mod_date)

            bugs_number = self.bugsNumber(jira)
            print "Tickets to be retrieved:", str(bugs_number)

            remaining = bugs_number
            while remaining > 0:
                startAtIssue = bugs_number - remaining
                jira = JIRA(options_jira)
                issuesAux = jira.search_issues('project=' + self.projectName + ' order by id asc',
                                               startAt=startAtIssue,
                                               maxResults=issues_per_query,
                                               fields=None)
                issues = []
                for i in issuesAux:
                    issues.append(jira.issue(i.key, expand='changelog'))

                self.analyze_bug_list(issues, self.serverUrl + '/browse/',
                                      bugsdb, dbtrk.id)
                remaining -= issues_per_query
                # print "Remaining time: ", (remaining/issues_per_xml_query)*Config.delay/60, "m", "(", remaining, ")"
                time.sleep(self.delay)

            printout("Done. %s bugs analyzed" % (bugs_number))


Backend.register_backend("atljira", JiraBackend)
            if field not in qs:
                raise NotValidURLError('Missing field %s' % field)
        else:
            raise NotValidURLError('Missing URL query set')

    def __order_query(self, url):
        """Keep only the sorted atid and group_id parameters of the tracker URL."""
        query = urlparse.urlsplit(url).query
        query = query.split("&")
        query.sort()

        parameter = ""
        for q in query:
            if q.find("atid") > -1:
                parameter = parameter + "&" + q
            if q.find("group_id") > -1:
                parameter = parameter + "&" + q

        aux_url = (url.split("/?")[0] + "/?" + parameter).replace("?&", "?")
        self.url = aux_url


Backend.register_backend('sf', SourceForge)


if __name__ == "__main__":
    url = "http://sourceforge.net/tracker/?func=detail&aid=3178299&group_id=152568&atid=784665"
    html = urllib2.urlopen(url)
    parser = SourceForgeParser()
    parser.parse_issue(html)
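# Standalone sketch (illustration only, not part of the backend) of the same
# normalization that __order_query() applies above: only the atid and group_id
# parameters survive, sorted, with the leading "?&" collapsed.
import urlparse

def order_query(url):
    query = sorted(urlparse.urlsplit(url).query.split("&"))
    parameter = ""
    for q in query:
        if q.find("atid") > -1:
            parameter = parameter + "&" + q
        if q.find("group_id") > -1:
            parameter = parameter + "&" + q
    return (url.split("/?")[0] + "/?" + parameter).replace("?&", "?")

print order_query("http://sourceforge.net/tracker/"
                  "?func=detail&aid=3178299&group_id=152568&atid=784665")
# -> http://sourceforge.net/tracker/?atid=784665&group_id=152568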
        request = urllib2.Request(self.url_issues, headers=headers)
        f = urllib2.urlopen(request)
        issues = json.loads(f.read())

        request = urllib2.Request(self.url_tasks, headers=headers)
        f = urllib2.urlopen(request)
        tasks = json.loads(f.read())

        request = urllib2.Request(self.url_userstories, headers=headers)
        f = urllib2.urlopen(request)
        userstories = json.loads(f.read())

        total_issues = len(issues)
        total_pages = total_issues / issues_per_query

        print("Number of tickets: " + str(len(issues)))
        print("Number of tasks: " + str(len(tasks)))
        print("Number of user stories: " + str(len(userstories)))

        if total_issues == 0:
            logging.info("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        # print "ETA ", (total_issues * Config.delay) / (60), "m (", (total_issues * Config.delay) / (60 * 60), "h)"

        nissues = self.parse_issues(issues, self.url_history_issue, auth_token,
                                    bugsdb, dbtrk_issues.id)
        logging.info("Done. Issues analyzed:" + str(nissues))

        ntasks = self.parse_issues(tasks, self.url_history_task, auth_token,
                                   bugsdb, dbtrk_tasks.id)
        logging.info("Done. Tasks analyzed:" + str(ntasks))

        nuserstories = self.parse_issues(userstories, self.url_history_userstory,
                                         auth_token, bugsdb, dbtrk_userstories.id)
        logging.info("Done. User stories analyzed:" + str(nuserstories))


Backend.register_backend('taiga', Taiga)
        while start_issue < total_issues:
            self.url_issues = Config.url + "/issues/full?max-results=" + str(issues_per_query)
            self.url_issues += "&start-index=" + str(start_issue)
            printdbg("URL for next issues " + self.url_issues)

            d = feedparser.parse(self.url_issues)

            for entry in d['entries']:
                try:
                    issue = self.analyze_bug(entry)
                    if issue is None:
                        continue
                    bugsdb.insert_issue(issue, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", remaining * Config.delay / 60, "m", " issues ", str(remaining)
                    time.sleep(Config.delay)
                except UnicodeEncodeError:
                    # More specific handler first, otherwise it would never be reached
                    printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                             % (issue.issue))
                except Exception, e:
                    printerr("Error in function analyze_bug ")
                    pprint.pprint(entry)
                    traceback.print_exc(file=sys.stdout)

            start_issue += issues_per_query

        printout("Done. %s bugs analyzed" % (total_issues - remaining))


Backend.register_backend('googlecode', GoogleCode)
    def run(self):
        self.debug = False
        logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
        logging.info("Running StoryBoard bicho backend")

        # max limit by default in https://storyboard.openstack.org/api/v1/
        self.items_per_query = 500
        if self.debug:
            self.items_per_query = 10  # debug

        self.bugsdb = get_database(DBStoryBoardBackend())
        self.bugsdb.insert_supported_traker("storyboard", "beta")
        trk = Tracker(Config.url, "storyboard", "beta")
        self.dbtrk = self.bugsdb.insert_tracker(trk)

        self.last_mod_date = self.bugsdb.get_last_modification_date()
        if self.last_mod_date:
            logging.info("Last bugs analyzed were modified on: %s" % self.last_mod_date)

        self.analyze_users()
        self.analyze_tasks()
        self.analyze_stories_events()
        self.check_tasks_events()


Backend.register_backend('storyboard', StoryBoard)
            if as_id:
                printout("Fetching tasks from %s id to %s id" %
                         (count, count + self.max_issues - 1))
            else:
                printout("Fetching tasks from %s to %s" %
                         (count, count + self.max_issues))

            ph_tasks = self.conduit.tasks(offset=count,
                                          limit=self.max_issues,
                                          as_id=as_id)
            if not ph_tasks:
                printdbg("No more tasks fetched")
                printout("Up to date")

        printout("Done. %s bugs analyzed" % (nbugs))

    def run(self):
        printout("Running Bicho - %s" % self.url)

        if not self.check_auth():
            sys.exit(1)

        try:
            self.fetch_and_store_tasks()
        except (requests.exceptions.HTTPError, ConduitError), e:
            printerr("Error: %s" % e)
            sys.exit(1)


Backend.register_backend('maniphest', Maniphest)
        while (number_results == limit or
               number_results == limit + 1):  # wikimedia gerrit returns limit+1

            # ordered by lastUpdated
            tickets = self.getReviews(limit, last_item)
            number_results = 0

            reviews = []
            for entry in tickets:
                if 'project' in entry.keys():
                    if entry['lastUpdated'] < last_mod_time:
                        break

                    reviews.append(entry["number"])
                    review_data = self.analyze_review(entry)
                    last_item = entry['sortKey']

                    # extra changes not included in gerrit changes
                    # self.add_merged_abandoned_changes_from_comments(entry, review_data)
                    self.add_merged_abandoned_changes(entry, review_data)
                    self.add_new_change(review_data)

                    bugsdb.insert_issue(review_data, dbtrk.id)
                    number_results += 1
                elif 'rowCount' in entry.keys():
                    pprint.pprint(entry)

            printdbg("CONTINUE FROM: " + last_item)
            total_reviews = total_reviews + int(number_results)

        self.check_merged_abandoned_changes(bugsdb.store, dbtrk.id)

        print("Done. Number of reviews: " + str(total_reviews))


Backend.register_backend('gerrit', Gerrit)
        f = urllib.urlopen(self.url_issues)
        ticketList = json.loads(f.read())

        bugs = []
        for ticket in ticketList["tickets"]:
            bugs.append(ticket["ticket_num"])

        for bug in bugs:
            try:
                issue_url = Config.url + "/" + str(bug)
                issue_data = self.analyze_bug(issue_url)
                if issue_data is None:
                    continue
                bugsdb.insert_issue(issue_data, dbtrk.id)
                remaining -= 1
                print "Remaining time: ", remaining * Config.delay / 60, "m"
                time.sleep(self.delay)
            except UnicodeEncodeError:
                # More specific handler first, otherwise it would never be reached
                printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except Exception, e:
                printerr("Error in function analyze_bug " + issue_url)
                traceback.print_exc(file=sys.stdout)

        start_page += 1

        printout("Done. Bugs analyzed:" + str(total_issues - remaining))


Backend.register_backend('allura', Allura)
                if tr_url != url:
                    aux_trk = Tracker(tr_url, "launchpad", "x.x")
                    dbtrk = bugsdb.insert_tracker(aux_trk)
                bugsdb.insert_issue(issue_data, dbtrk.id)
            except UnicodeEncodeError:
                printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except Exception, e:
                printerr("Unexpected Error: the issue %s couldn't be stored"
                         % (issue_data.issue))
                print e

            analyzed.append(bug.web_link)  # for the bizarre error #338
            time.sleep(self.delay)

        try:
            # we read the temporary table with the relationships and create
            # the final one
            bugsdb.store_final_relationships()
        except:
            raise

        printout("Done. %s bugs analyzed" % (nbugs))


Backend.register_backend("lp", LPBackend)
                    # last_item = "001f672c00002f80";
                    last_item = entry['sortKey']

                    if entry['lastUpdated'] < last_mod_time:
                        break

                    reviews.append(entry["number"])
                    review_data = self.analyze_review(entry)
                    if review_data is None:
                        pprint.pprint("ERROR in review. Ignoring it.")
                        continue

                    # extra changes not included in gerrit changes
                    # self.add_merged_abandoned_changes_from_comments(entry, review_data)
                    self.add_merged_abandoned_changes(entry, review_data)
                    self.add_uploaded_patchset_from_comments(entry, review_data)
                    self.add_new_change(review_data)

                    bugsdb.insert_issue(review_data, dbtrk.id)
                    number_results += 1
                elif 'rowCount' in entry.keys():
                    pprint.pprint(entry)

            printdbg("CONTINUE FROM: " + str(last_item))
            total_reviews = total_reviews + int(number_results)

        self.check_merged_abandoned_changes(bugsdb.store, dbtrk.id)

        print("Done. Number of reviews: " + str(total_reviews))


Backend.register_backend('gerrit', Gerrit)
        last_ticket = tickets["issues"][0]['id']

        while True:
            last_page += 1
            url = self.url_issues + "&page=" + str(last_page)
            request = urllib2.Request(url)
            # base64string = base64.encodestring('%s:%s' % (Config.backend_user, Config.backend_password)).replace('\n', '')
            # request.add_header("Authorization", "Basic %s" % base64string)
            f = urllib2.urlopen(request)
            tickets = json.loads(f.read())

            if len(tickets['issues']) == 0:
                break

            pprint.pprint("Tickets read: " + str(tickets["issues"][0]['id']) + " "
                          + str(tickets["issues"][-1]['id']))

            if tickets["issues"][0]['id'] == last_ticket:
                break

            for ticket in tickets["issues"]:
                issue = self.analyze_bug(ticket)
                bugsdb.insert_issue(issue, dbtrk.id)
                time.sleep(self.delay)

        pprint.pprint("Total pages: " + str(last_page))

        printout("Done. Bugs analyzed:" + str(last_page * tickets_page))


Backend.register_backend('redmine', Redmine)
                    if tr_url != url:
                        aux_trk = Tracker(tr_url, "github", "v3")
                        dbtrk = bugsdb.insert_tracker(aux_trk)
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                except UnicodeEncodeError:
                    printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                             % (issue_data.issue))
                except Exception, e:
                    printerr("ERROR: ")
                    print e

                printdbg("Getting ticket number " + str(bug["number"]))
                time.sleep(self.delay)

            self.pagecont += 1

            try:
                bugs = self.__get_batch_bugs()
            except GitHubRateLimitReached:
                printout("GitHub rate limit reached. To resume, wait some minutes.")
                sys.exit(0)

            nbugs = nbugs + len(bugs)
        # end while

        printout("Done. %s bugs analyzed" % (nbugs))


Backend.register_backend("github", GithubBackend)
        dbtrk = bugsdb.insert_tracker(trk)

        for p in projects:
            trk_url = p.web_link.replace("://", "://bugs.")
            trk = Tracker(trk_url, "launchpad", "x.x")
            dbtrk = bugsdb.insert_tracker(trk)

            last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

            if last_mod_date:
                bugs = p.searchTasks(status=aux_status,
                                     omit_duplicates=False,
                                     order_by="date_last_updated",
                                     modified_since=last_mod_date)
            else:
                bugs = p.searchTasks(status=aux_status,
                                     omit_duplicates=False,
                                     order_by="date_last_updated")
            printdbg("Last bug already cached: %s" % last_mod_date)

            nbugs = len(bugs)
            if nbugs == 0:
                printout("No bugs found on %s" % p.name)
                continue
            else:
                printout("%s bugs found on %s" % (nbugs, p.name))

            self.analyze_project_bugs(bugs, dbtrk, bugsdb)


Backend.register_backend("lp", LPBackend)
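# Illustration only: how trk_url is derived above. Launchpad serves projects
# under launchpad.net while their bug trackers live under bugs.launchpad.net,
# so the backend just prefixes the host. "ubuntu" is a hypothetical project
# name, not taken from this fragment.
web_link = "https://launchpad.net/ubuntu"
trk_url = web_link.replace("://", "://bugs.")
print trk_url  # https://bugs.launchpad.net/ubuntu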