def setUpBackend():
    # Test fixture: configure the Allura backend, its database and the
    # local data directory used by the test suite.
    backend_name = 'allura'
    Config.delay = 1
    Config.debug = True
    Config.url = "http://sourceforge.net/rest/p/allura/tickets"
    AlluraTest.setUpDB()
    AlluraTest.issuesDB = get_database(DBAlluraBackend())
    AlluraTest.issuesDB.insert_supported_traker(backend_name, "beta")
    AlluraTest.tracker = Tracker(Config.url, backend_name, "beta")
    AlluraTest.dbtracker = AlluraTest.issuesDB.insert_tracker(AlluraTest.tracker)
    AlluraTest.tests_data_dir = os.path.join('./data/', AlluraTest.tracker.name)
    AlluraTest.backend = Backend.create_backend(backend_name)
    if not os.path.isdir(AlluraTest.tests_data_dir):
        os.makedirs(AlluraTest.tests_data_dir)
        # The first page has already been fetched into 'f'; store its tickets,
        # then page through the rest until a page starts with the same ticket
        # id as the first page.
        tickets = json.loads(f.read())
        for ticket in tickets:
            issue = self.analyze_bug(ticket)
            bugsdb.insert_issue(issue, dbtrk.id)

        last_ticket = tickets[0]['id']

        while True:
            last_page += 1
            self.url_issues = Config.url + "?status_id=*&page=" + str(last_page)
            request = urllib2.Request(self.url_issues)
            base64string = base64.encodestring('%s:%s' % (Config.backend_user,
                                                          Config.backend_password)).replace('\n', '')
            request.add_header("Authorization", "Basic %s" % base64string)
            f = urllib2.urlopen(request)
            tickets = json.loads(f.read())
            pprint.pprint("Tickets read: " + str(tickets[0]['id']) + " "
                          + str(tickets[-1]['id']))
            if tickets[0]['id'] == last_ticket:
                break
            for ticket in tickets:
                issue = self.analyze_bug(ticket)
                bugsdb.insert_issue(issue, dbtrk.id)

        pprint.pprint("Total pages: " + str(last_page))
        printout("Done. Bugs analyzed:" + str(last_page * tickets_page))


Backend.register_backend('redmine', Redmine)
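# The Authorization header above is assembled by hand with base64.encodestring,
# which inserts newlines (hence the .replace('\n', '')). A minimal sketch of the
# same Basic-auth header built with base64.b64encode, which returns a single
# line; the helper name and example URL are illustrative, not part of the code above.
import base64
import urllib2


def add_basic_auth(request, user, password):
    # b64encode produces a single line, so no newline stripping is needed
    token = base64.b64encode('%s:%s' % (user, password))
    request.add_header("Authorization", "Basic %s" % token)


req = urllib2.Request("http://tracker.example.org/issues.json?page=1")
add_basic_auth(req, "backend_user", "backend_password")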
            parser = xml.sax.make_parser()
            handler = BugsHandler()
            parser.setContentHandler(handler)
            try:
                parser.parse(serverUrl + query + bug_key + "/" + bug_key + ".xml")
                issue = handler.getIssues()[0]
                bugsdb.insert_issue(issue, dbtrk.id)
            except Exception, e:
                #printerr(e)
                print(e)
        else:
            self.last_mod_date = bugsdb.get_last_modification_date(dbtrk.id)
            if self.last_mod_date:
                # self.url = self.url + "&updated:after=" + last_mod_date
                printdbg("Last bugs cached were modified at: %s" % self.last_mod_date)

            bugs_number = self.bugsNumber(self.url)
            print "Tickets to be retrieved:", str(bugs_number)

            # Fetch the remaining bugs in batches of issues_per_xml_query
            remaining = bugs_number
            while (remaining > 0):
                self.analyze_bug_list(issues_per_xml_query,
                                      bugs_number - remaining, bugsdb, dbtrk.id)
                remaining -= issues_per_xml_query
                #print "Remaining time: ", (remaining/issues_per_xml_query)*Config.delay/60, "m", "(",remaining,")"
                time.sleep(self.delay)

            printout("Done. %s bugs analyzed" % (bugs_number))


Backend.register_backend("jira", JiraBackend)
        while start_issue < total_issues:
            self.url_issues = Config.url + "/issues/full?max-results=" + str(issues_per_query)
            self.url_issues += "&start-index=" + str(start_issue)
            printdbg("URL for next issues " + self.url_issues)

            d = feedparser.parse(self.url_issues)

            for entry in d['entries']:
                try:
                    issue = self.analyze_bug(entry)
                    if issue is None:
                        continue
                    bugsdb.insert_issue(issue, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", remaining * Config.delay / 60, "m", " issues ", str(remaining)
                    time.sleep(Config.delay)
                except UnicodeEncodeError:
                    # Must come before the generic Exception handler,
                    # otherwise this clause is unreachable
                    printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                             % (issue.issue))
                except Exception, e:
                    printerr("Error in function analyze_bug ")
                    pprint.pprint(entry)
                    traceback.print_exc(file=sys.stdout)

            start_issue += issues_per_query

        printout("Done. %s bugs analyzed" % (total_issues - remaining))


Backend.register_backend('googlecode', GoogleCode)
            try:
                # we can have meta-trackers but we want to have the
                # original tracker name
                tr_url = self.__get_tracker_url_from_bug(bug)
                if (tr_url != url):
                    aux_trk = Tracker(tr_url, "launchpad", "x.x")
                    dbtrk = bugsdb.insert_tracker(aux_trk)
                bugsdb.insert_issue(issue_data, dbtrk.id)
            except UnicodeEncodeError:
                printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except Exception, e:
                printerr("Unexpected Error: the issue %s couldn't be stored"
                         % (issue_data.issue))
                print e

            analyzed.append(bug.web_link)  # for the bizarre error #338
            time.sleep(self.delay)

        try:
            # we read the temporary table with the relationships and create
            # the final one
            bugsdb.store_final_relationships()
        except:
            raise

        printout("Done. %s bugs analyzed" % (nbugs))


Backend.register_backend("lp", LPBackend)
            if field not in qs:
                raise NotValidURLError('Missing field %s' % field)
        else:
            raise NotValidURLError('Missing URL query set')

    def __order_query(self, url):
        """Keep only the atid and group_id parameters of the tracker URL
        and store the normalized result in self.url."""
        query = urlparse.urlsplit(url).query
        query = query.split("&")
        query.sort()

        parameter = ""
        for q in query:
            if q.find("atid") > -1:
                parameter = parameter + "&" + q
            if q.find("group_id") > -1:
                parameter = parameter + "&" + q

        aux_url = (url.split("/?")[0] + "/?" + parameter).replace("?&", "?")
        self.url = aux_url


Backend.register_backend('sf', SourceForge)

if __name__ == "__main__":
    url = "http://sourceforge.net/tracker/?func=detail&aid=3178299&group_id=152568&atid=784665"
    html = urllib2.urlopen(url)
    parser = SourceForgeParser()
    parser.parse_issue(html)
        f = urllib.urlopen(self.url_issues)
        ticketList = json.loads(f.read())

        bugs = []
        for ticket in ticketList["tickets"]:
            bugs.append(ticket["ticket_num"])

        for bug in bugs:
            try:
                issue_url = Config.url + "/" + str(bug)
                issue_data = self.analyze_bug(issue_url)
                if issue_data is None:
                    continue
                bugsdb.insert_issue(issue_data, dbtrk.id)
                remaining -= 1
                print "Remaining time: ", remaining * Config.delay / 60, "m"
                time.sleep(self.delay)
            except UnicodeEncodeError:
                # Must come before the generic Exception handler,
                # otherwise this clause is unreachable
                printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except Exception, e:
                printerr("Error in function analyze_bug " + issue_url)
                traceback.print_exc(file=sys.stdout)

        start_page += 1

        printout("Done. Bugs analyzed:" + str(total_issues - remaining))


Backend.register_backend('allura', Allura)
                    raise

                try:
                    # we can have meta-trackers but we want to have the
                    # original tracker name
                    tr_url = self.__get_tracker_url_from_bug(bug)
                    if (tr_url != url):
                        aux_trk = Tracker(tr_url, "github", "v3")
                        dbtrk = bugsdb.insert_tracker(aux_trk)
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                except UnicodeEncodeError:
                    printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                             % (issue_data.issue))
                except Exception, e:
                    printerr("ERROR: ")
                    print e

                time.sleep(self.delay)

            self.pagecont += 1
            bugs = self.__get_batch_bugs()
            nbugs = nbugs + len(bugs)
        # end while

        printout("Done. %s bugs analyzed" % (nbugs))


Backend.register_backend("github", GithubBackend)
    def __init__(self):
        Backend.__init__(self)

        options = Config()
        self.url = options.url
        self.delay = options.delay
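# Every backend module in this section ends with Backend.register_backend(name, class),
# and the test fixture obtains an instance with Backend.create_backend(name).
# The base class itself is not part of this excerpt; a minimal sketch of the
# registry pattern those two calls imply (the _backends dict and its handling
# are assumptions, not the actual implementation) could look like this:

class Backend(object):

    _backends = {}  # assumed internal map: backend name -> backend class

    def __init__(self):
        pass

    @classmethod
    def register_backend(cls, name, backend_class):
        # Make the backend available under the given name
        cls._backends[name] = backend_class

    @classmethod
    def create_backend(cls, name):
        # Instantiate the class previously registered under 'name'
        return cls._backends[name]()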
        try:
            contents = f.read()
        except Exception:
            printerr("Error retrieving URL: %s" % (bugs_url))
            raise

        try:
            parser.feed(contents)
            parser.close()
        except Exception:
            # Clean only the invalid XML
            try:
                parser2 = xml.sax.make_parser()
                parser2.setContentHandler(handler)
                printdbg("Cleaning dirty XML")
                cleaned_contents = ''.join(
                    c for c in contents if valid_XML_char_ordinal(ord(c)))
                parser2.feed(cleaned_contents)
                parser2.close()
            except Exception:
                printerr("Error parsing URL: %s" % (bugs_url))
                raise

        f.close()

    def _timestamp_to_str(self, ts):
        if not ts:
            return None
        return ts.strftime('%Y-%m-%d %H:%M:%S')


Backend.register_backend("bg", BGBackend)
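# The recovery path above filters 'contents' through valid_XML_char_ordinal,
# which is defined elsewhere in the project. A predicate of that kind usually
# accepts exactly the code points allowed by the XML 1.0 Char production; a
# sketch along those lines (not necessarily the project's exact implementation):

def valid_XML_char_ordinal(i):
    """Return True if code point i may appear in an XML 1.0 document."""
    return (i == 0x9 or i == 0xA or i == 0xD
            or 0x20 <= i <= 0xD7FF
            or 0xE000 <= i <= 0xFFFD
            or 0x10000 <= i <= 0x10FFFF)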
printdbg("URL for next issues " + self.url_issues) d = feedparser.parse(self.url_issues) for entry in d['entries']: try: issue = self.analyze_bug(entry) if issue is None: continue bugsdb.insert_issue(issue, dbtrk.id) remaining -= 1 print "Remaining time: ", ( remaining) * Config.delay / 60, "m", " issues ", str( remaining) time.sleep(Config.delay) except Exception, e: printerr("Error in function analyze_bug ") pprint.pprint(entry) traceback.print_exc(file=sys.stdout) except UnicodeEncodeError: printerr( "UnicodeEncodeError: the issue %s couldn't be stored" % (issue.issue)) start_issue += issues_per_query printout("Done. %s bugs analyzed" % (total_issues - remaining)) Backend.register_backend('googlecode', GoogleCode)
        limit = 500  # gerrit default 500
        last_item = ""
        #last_item = "001f672c00002f80"
        number_results = limit
        total_reviews = 0

        while (number_results == limit or
               number_results == limit + 1):  # wikimedia gerrit returns limit+1
            # ordered by lastUpdated
            tickets = self.getReviews(limit, last_item)
            number_results = 0

            reviews = []
            for entry in tickets:
                if 'project' in entry.keys():
                    if (entry['lastUpdated'] < last_mod_time):
                        break
                    reviews.append(entry["number"])
                    review_data = self.analyze_review(entry)
                    last_item = entry['sortKey']
                    bugsdb.insert_issue(review_data, dbtrk.id)
                    number_results = number_results + 1
                elif 'rowCount' in entry.keys():
                    pprint.pprint(entry)

            printdbg("CONTINUE FROM: " + last_item)
            total_reviews = total_reviews + int(number_results)

        print("Done. Number of reviews: " + str(total_reviews))


Backend.register_backend('gerrit', Gerrit)
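# The loop above depends on self.getReviews(limit, last_item) and on each
# entry's sortKey to resume paging; that method is not part of this excerpt.
# Against the legacy Gerrit SSH query interface (which emits one JSON object
# per review plus a final 'rowCount' stats record, matching the checks above),
# a plausible sketch might be the following -- the account, port and the
# status:open filter are assumptions, not taken from the original code:

import json
import subprocess


def get_reviews(limit, last_item, account="user@gerrit.example.org", port="29418"):
    cmd = ["ssh", "-p", port, account, "gerrit", "query",
           "--format=JSON", "limit:%s" % limit, "status:open"]
    if last_item:
        # Resume after the last review seen on the previous page
        cmd.append("resume_sortkey:%s" % last_item)
    output = subprocess.check_output(cmd)
    # One JSON document per line; the caller filters out the stats record
    return [json.loads(line) for line in output.splitlines() if line]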
            bugsdb.insert_issue(issue, dbtrk.id)

        last_ticket = tickets["issues"][0]['id']

        while True:
            last_page += 1
            if Config.url.find('?') > 0:
                self.url_issues = Config.url + "&status_id=*&sort=updated_on&page=" + str(last_page)
            else:
                self.url_issues = Config.url + "?status_id=*&sort=updated_on&page=" + str(last_page)
            request = urllib2.Request(self.url_issues)
            #base64string = base64.encodestring('%s:%s' % (Config.backend_user, Config.backend_password)).replace('\n', '')
            #request.add_header("Authorization", "Basic %s" % base64string)
            f = urllib2.urlopen(request)
            tickets = json.loads(f.read())
            pprint.pprint("Tickets read: " + str(tickets["issues"][0]['id']) + " "
                          + str(tickets["issues"][-1]['id']))
            if tickets["issues"][0]['id'] == last_ticket:
                break
            for ticket in tickets["issues"]:
                issue = self.analyze_bug(ticket)
                bugsdb.insert_issue(issue, dbtrk.id)

        pprint.pprint("Total pages: " + str(last_page))
        printout("Done. Bugs analyzed:" + str(last_page * tickets_page))


Backend.register_backend('redmine', Redmine)