def run(self):
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    issues_per_xml_query = 500

    bugsdb = get_database(DBJiraBackend())
    bugsdb.insert_supported_traker("jira", "4.1.2")
    trk = Tracker(self.url.split("-")[0], "jira", "4.1.2")
    dbtrk = bugsdb.insert_tracker(trk)

    serverUrl = self.url.split("/browse/")[0]
    query = "/si/jira.issueviews:issue-xml/"
    project = self.url.split("/browse/")[1]

    if len(project.split("-")) > 1:
        # the URL points at a single issue (e.g. PROJ-123)
        bug_key = project
        project = project.split("-")[0]
        bugs_number = 1

        printdbg(serverUrl + query + bug_key + "/" + bug_key + ".xml")

        parser = xml.sax.make_parser()
        handler = BugsHandler()
        parser.setContentHandler(handler)
        try:
            parser.parse(serverUrl + query + bug_key + "/" + bug_key + ".xml")
            issue = handler.getIssues()[0]
            bugsdb.insert_issue(issue, dbtrk.id)
        except Exception, e:
            #printerr(e)
            print(e)
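# A minimal sketch of the single-issue XML URL composed in run() above. The
# tracker URL is hypothetical; the "/si/jira.issueviews:issue-xml/" path is
# the one the backend itself uses.
def example_jira_issue_xml_url(tracker_url="https://issues.example.org/browse/PROJ-123"):
    server_url = tracker_url.split("/browse/")[0]
    bug_key = tracker_url.split("/browse/")[1]
    return server_url + "/si/jira.issueviews:issue-xml/" + bug_key + "/" + bug_key + ".xml"

# example_jira_issue_xml_url() returns:
# "https://issues.example.org/si/jira.issueviews:issue-xml/PROJ-123/PROJ-123.xml"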
def run(self):
    """
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    issues_per_query = 250
    start_issue = 1

    bugs = []
    bugsdb = get_database(DBGoogleCodeBackend())

    # still useless
    bugsdb.insert_supported_traker("googlecode", "beta")
    trk = Tracker(Config.url, "googlecode", "beta")
    dbtrk = bugsdb.insert_tracker(trk)

    self.url = Config.url

    # https://code.google.com/feeds/issues/p/mobile-time-care
    self.url_issues = Config.url + "/issues/full?max-results=1"
    printdbg("URL for getting metadata " + self.url_issues)

    d = feedparser.parse(self.url_issues)
    total_issues = int(d['feed']['opensearch_totalresults'])
    print "Total bugs: ", total_issues

    if total_issues == 0:
        printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)

    remaining = total_issues
    print "ETA ", (total_issues * Config.delay) / 60, "m (", (total_issues * Config.delay) / (60 * 60), "h)"

    while start_issue < total_issues:
        self.url_issues = Config.url + "/issues/full?max-results=" + str(issues_per_query)
        self.url_issues += "&start-index=" + str(start_issue)
        printdbg("URL for next issues " + self.url_issues)

        d = feedparser.parse(self.url_issues)

        for entry in d['entries']:
            try:
                issue = self.analyze_bug(entry)
                if issue is None:
                    continue
                bugsdb.insert_issue(issue, dbtrk.id)
                remaining -= 1
                print "Remaining time: ", remaining * Config.delay / 60, "m", " issues ", str(remaining)
                time.sleep(Config.delay)
            except UnicodeEncodeError:
                # must come before the generic handler or it is never reached
                printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                         % (issue.issue))
            except Exception, e:
                printerr("Error in function analyze_bug ")
                pprint.pprint(entry)
                traceback.print_exc(file=sys.stdout)

        # advance the feed window; otherwise the loop refetches the same page
        start_issue += issues_per_query
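# A minimal sketch of the feed paging used above: the Google Code issues feed
# is walked with max-results/start-index query parameters, 250 issues per
# request. The base URL in the usage note is hypothetical.
def example_feed_page_urls(base_url, total_issues, issues_per_query=250):
    start_issue = 1
    urls = []
    while start_issue < total_issues:
        urls.append(base_url + "/issues/full?max-results=" + str(issues_per_query)
                    + "&start-index=" + str(start_issue))
        start_issue += issues_per_query
    return urls

# example_feed_page_urls("https://code.google.com/feeds/issues/p/example", 600)
# returns three page URLs, with start-index 1, 251 and 501.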
def run(self):
    """
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    # redmine 1.0 support
    last_page = 1
    tickets_page = 25  # fixed redmine

    bugs = []
    bugsdb = get_database(DBRedmineBackend())

    # still useless in redmine
    bugsdb.insert_supported_traker("redmine", "beta")
    trk = Tracker(Config.url, "redmine", "beta")
    dbtrk = bugsdb.insert_tracker(trk)

    self.url_issues = Config.url + "issues.json?status_id=*&sort=updated_on&page=" + str(last_page)
    request = urllib2.Request(self.url_issues)
    if self.backend_user:
        base64string = base64.encodestring('%s:%s' % (Config.backend_user, Config.backend_password)).replace('\n', '')
        request.add_header("Authorization", "Basic %s" % base64string)

    # Get statuses
    self._get_statuses()

    f = urllib2.urlopen(request)
    tickets = json.loads(f.read())
    for ticket in tickets["issues"]:
        issue = self.analyze_bug(ticket)
        bugsdb.insert_issue(issue, dbtrk.id)

    last_ticket = tickets["issues"][0]['id']

    while True:
        last_page += 1
        self.url_issues = Config.url + "issues.json?status_id=*&sort=updated_on&page=" + str(last_page)
        request = urllib2.Request(self.url_issues)
        #base64string = base64.encodestring('%s:%s' % (Config.backend_user, Config.backend_password)).replace('\n', '')
        #request.add_header("Authorization", "Basic %s" % base64string)
        f = urllib2.urlopen(request)
        tickets = json.loads(f.read())

        if len(tickets['issues']) == 0:
            break

        pprint.pprint("Tickets read: " + str(tickets["issues"][0]['id'])
                      + " " + str(tickets["issues"][-1]['id']))

        if tickets["issues"][0]['id'] == last_ticket:
            break

        for ticket in tickets["issues"]:
            issue = self.analyze_bug(ticket)
            bugsdb.insert_issue(issue, dbtrk.id)

    pprint.pprint("Total pages: " + str(last_page))
    printout("Done. Bugs analyzed: " + str(last_page * tickets_page))
def run(self):
    """
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    # redmine 1.0 support
    last_page = 1
    tickets_page = 25  # fixed redmine

    bugs = []
    bugsdb = get_database(DBRedmineBackend())

    # still useless in redmine
    bugsdb.insert_supported_traker("redmine", "beta")
    trk = Tracker(Config.url, "redmine", "beta")
    dbtrk = bugsdb.insert_tracker(trk)

    if Config.url.find('?') > 0:
        self.url_issues = Config.url + "&status_id=*&sort=updated_on&page=" + str(last_page)
    else:
        self.url_issues = Config.url + "?status_id=*&sort=updated_on&page=" + str(last_page)
    request = urllib2.Request(self.url_issues)

    if self.backend_user:
        base64string = base64.encodestring('%s:%s' % (Config.backend_user, Config.backend_password)).replace('\n', '')
        request.add_header("Authorization", "Basic %s" % base64string)

    f = urllib2.urlopen(request)
    tickets = json.loads(f.read())
    for ticket in tickets["issues"]:
        issue = self.analyze_bug(ticket)
        bugsdb.insert_issue(issue, dbtrk.id)

    last_ticket = tickets["issues"][0]['id']

    while True:
        last_page += 1
        if Config.url.find('?') > 0:
            self.url_issues = Config.url + "&status_id=*&sort=updated_on&page=" + str(last_page)
        else:
            self.url_issues = Config.url + "?status_id=*&sort=updated_on&page=" + str(last_page)
        request = urllib2.Request(self.url_issues)
        #base64string = base64.encodestring('%s:%s' % (Config.backend_user, Config.backend_password)).replace('\n', '')
        #request.add_header("Authorization", "Basic %s" % base64string)
        f = urllib2.urlopen(request)
        tickets = json.loads(f.read())

        if len(tickets['issues']) == 0:
            # an empty page means we are past the last ticket
            break

        pprint.pprint("Tickets read: " + str(tickets["issues"][0]['id'])
                      + " " + str(tickets["issues"][-1]['id']))

        if tickets["issues"][0]['id'] == last_ticket:
            break

        for ticket in tickets["issues"]:
            issue = self.analyze_bug(ticket)
            bugsdb.insert_issue(issue, dbtrk.id)

    pprint.pprint("Total pages: " + str(last_page))
    printout("Done. Bugs analyzed: " + str(last_page * tickets_page))
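# A minimal, standalone sketch of the HTTP Basic auth header built in both
# Redmine variants above. base64.encodestring() appends a trailing newline,
# which is why the code strips '\n' before putting the value in the header.
# The URL and credentials in the usage note are hypothetical.
import base64
import urllib2

def example_authenticated_request(url, user, password):
    request = urllib2.Request(url)
    auth = base64.encodestring('%s:%s' % (user, password)).replace('\n', '')
    request.add_header("Authorization", "Basic %s" % auth)
    return request

# req = example_authenticated_request("http://tracker.example.org/issues.json",
#                                     "alice", "secret")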
def run(self):
    """
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    bugs = []
    bugsdb = get_database(DBGerritBackend())

    # still useless in gerrit
    bugsdb.insert_supported_traker("gerrit", "beta")
    trk = Tracker(Config.url + "_" + Config.gerrit_project, "gerrit", "beta")
    dbtrk = bugsdb.insert_tracker(trk)

    last_mod_time = 0
    last_mod_date = bugsdb.get_last_modification_date(dbtrk.id)
    if last_mod_date:
        printdbg("Last reviews analyzed were modified on date: %s" % last_mod_date)
        last_mod_time = time.mktime(time.strptime(last_mod_date, '%Y-%m-%d %H:%M:%S'))

    limit = 500  # gerrit default 500
    last_item = ""
    # last_item = "001f672c00002f80"
    number_results = limit
    total_reviews = 0

    while (number_results == limit or
           number_results == limit + 1):  # wikimedia gerrit returns limit+1
        # ordered by lastUpdated
        tickets = self.getReviews(limit, last_item)
        number_results = 0

        reviews = []
        for entry in tickets:
            if 'project' in entry.keys():
                if (entry['lastUpdated'] < last_mod_time):
                    break
                reviews.append(entry["number"])
                review_data = self.analyze_review(entry)
                last_item = entry['sortKey']
                # extra changes not included in gerrit changes
                # self.add_merged_abandoned_changes_from_comments(entry, review_data)
                self.add_merged_abandoned_changes(entry, review_data)
                self.add_new_change(review_data)
                bugsdb.insert_issue(review_data, dbtrk.id)
                number_results += 1
            elif 'rowCount' in entry.keys():
                pprint.pprint(entry)
        printdbg("CONTINUE FROM: " + last_item)
        total_reviews = total_reviews + int(number_results)

    self.check_merged_abandoned_changes(bugsdb.store, dbtrk.id)

    print("Done. Number of reviews: " + str(total_reviews))
def run(self):
    """
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    bugs = []
    bugsdb = get_database(DBGerritBackend())

    # still useless in gerrit
    bugsdb.insert_supported_traker("gerrit", "beta")
    trk = Tracker(Config.url + "_" + Config.gerrit_project, "gerrit", "beta")
    dbtrk = bugsdb.insert_tracker(trk)

    last_mod_time = 0
    last_mod_date = bugsdb.get_last_modification_date(dbtrk.id)
    if last_mod_date:
        printdbg("Last reviews analyzed were modified on date: %s" % last_mod_date)
        last_mod_time = time.mktime(time.strptime(last_mod_date, '%Y-%m-%d %H:%M:%S'))

    limit = 500  # gerrit default 500
    last_item = ""
    # last_item = "001f672c00002f80"
    number_results = limit
    total_reviews = 0

    while (number_results == limit or
           number_results == limit + 1):  # wikimedia gerrit returns limit+1
        # ordered by lastUpdated
        tickets = self.getReviews(limit, last_item)
        number_results = 0

        reviews = []
        for entry in tickets:
            if 'project' in entry.keys():
                if (entry['lastUpdated'] < last_mod_time):
                    break
                reviews.append(entry["number"])
                review_data = self.analyze_review(entry)
                last_item = entry['sortKey']
                bugsdb.insert_issue(review_data, dbtrk.id)
                number_results += 1
            elif 'rowCount' in entry.keys():
                pprint.pprint(entry)
        printdbg("CONTINUE FROM: " + last_item)
        total_reviews = total_reviews + int(number_results)

    print("Done. Number of reviews: " + str(total_reviews))
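# A minimal sketch of the resume-key paging used by both Gerrit run()
# variants above: each query returns up to `limit` reviews plus a trailing
# stats row (the 'rowCount' entry), and the 'sortKey' of the last processed
# review is fed back as the resume point for the next query. get_reviews is
# a stand-in for self.getReviews(); per-review analysis is elided.
def example_paged_reviews(get_reviews, limit=500):
    last_item = ""
    number_results = limit
    total_reviews = 0
    # a full page means there may be more; some servers return limit+1
    while number_results >= limit:
        number_results = 0
        for entry in get_reviews(limit, last_item):
            if 'project' in entry:
                last_item = entry['sortKey']  # resume key for the next query
                number_results += 1
        total_reviews += number_results
    return total_reviews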
def setUpBackend():
    backend_name = 'allura'
    Config.delay = 1
    Config.debug = True
    Config.url = "http://sourceforge.net/rest/p/allura/tickets"
    AlluraTest.setUpDB()
    AlluraTest.issuesDB = get_database(DBAlluraBackend())
    AlluraTest.issuesDB.insert_supported_traker(backend_name, "beta")
    AlluraTest.tracker = Tracker(Config.url, backend_name, "beta")
    AlluraTest.dbtracker = AlluraTest.issuesDB.insert_tracker(AlluraTest.tracker)
    AlluraTest.tests_data_dir = os.path.join('./data/', AlluraTest.tracker.name)
    AlluraTest.backend = Backend.create_backend(backend_name)
    if not os.path.isdir(AlluraTest.tests_data_dir):
        os.makedirs(AlluraTest.tests_data_dir)
def run(self, url):
    """
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    self.url = url
    ids = []
    self.parser = SourceForgeParser()

    # first we take the bugs ids
    if url.find("aid=") > 0:
        aux = url.split("aid=")[1].split("&")[0]
        ids.append(aux)
    else:
        ids = self.__get_issues_list(self.url)

    self.__check_tracker_url(self.url)

    # order the parameters in the url to add the same tracker url
    # to data base without aid parameter
    self.__order_query(self.url)

    self.db = get_database(DBSourceForgeBackend())
    self.db.insert_supported_traker(SUPPORTED_SF_TRACKERS[0],
                                    SUPPORTED_SF_TRACKERS[1])
    self.__insert_tracker(self.url)

    nbugs = len(ids)
    if nbugs == 0:
        printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)

    for id in ids:
        url = self.url + "&func=detail&aid=%s" % id  # FIXME: urls!!!
        printdbg(url)
        issue = self.__get_issue(url)
        self.__insert_issue(issue)
        time.sleep(self.delay)

    printout("Done. %s bugs analyzed" % (nbugs))
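# A minimal sketch of the aid extraction above, plus a more robust variant
# using the standard library query parser. The sample URL is hypothetical.
import urlparse

def example_extract_aid(url):
    # string-splitting approach used by the backend
    if url.find("aid=") > 0:
        return url.split("aid=")[1].split("&")[0]
    return None

def example_extract_aid_parsed(url):
    # equivalent, but tolerant of parameter order
    query = urlparse.urlparse(url).query
    return urlparse.parse_qs(query).get("aid", [None])[0]

# Both return "123" for:
# "http://sourceforge.net/tracker/?func=detail&aid=123&group_id=1&atid=2"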
def __init__(self):
    self.url = self._healthy_url(Config.url)
    self.delay = Config.delay
    self.cookies = {}
    self.version = None
    self.tracker = None
    self.retrieved = {}  # retrieved issues on this run

    try:
        self.backend_password = Config.backend_password
        self.backend_user = Config.backend_user
    except AttributeError:
        printout("No bugzilla account provided, mail addresses won't " +
                 "be retrieved")
        self.backend_password = None
        self.backend_user = None

    self.bugsdb = get_database(DBBugzillaBackend())
def run(self):
    """
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    ids = []
    self.parser = SourceForgeParser()

    # first we take the bugs ids
    if self.url.find("aid=") > 0:
        aux = self.url.split("aid=")[1].split("&")[0]
        ids.append(aux)
    else:
        ids = self.__get_issues_list(self.url)

    self.__check_tracker_url(self.url)

    # order the parameters in the url to add the same tracker url
    # to data base without aid parameter
    self.__order_query(self.url)

    self.db = get_database(DBSourceForgeBackend())
    self.db.insert_supported_traker(SUPPORTED_SF_TRACKERS[0],
                                    SUPPORTED_SF_TRACKERS[1])
    self.__insert_tracker(self.url)

    nbugs = len(ids)
    if nbugs == 0:
        printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)

    for id in ids:
        url = self.url + '&func=detail&aid=%s' % id  # FIXME: urls!!!
        printdbg(url)
        issue = self.__get_issue(url)
        self.__insert_issue(issue)
        time.sleep(self.delay)

    printout("Done. %s bugs analyzed" % (nbugs))
def run(self):
    """
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    # limit=-1 is NOT recognized as 'all'. 500 is a reasonable limit. - allura code
    issues_per_query = 500
    start_page = 0

    bugs = []
    bugsdb = get_database(DBAlluraBackend())

    # still useless in allura
    bugsdb.insert_supported_traker("allura", "beta")
    trk = Tracker(Config.url, "allura", "beta")
    dbtrk = bugsdb.insert_tracker(trk)
    last_mod_date = bugsdb.get_last_modification_date()

    # Date before the first ticket
    time_window_start = "1900-01-01T00:00:00Z"
    time_window_end = datetime.now().isoformat() + "Z"

    if last_mod_date:
        time_window_start = last_mod_date
        printdbg("Last bugs analyzed were modified on: %s" % last_mod_date)

    time_window = time_window_start + " TO " + time_window_end

    self.url_issues = Config.url + "/search/?limit=1"
    self.url_issues += "&q="
    # A time range with all the tickets
    self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]")
    printdbg("URL for getting metadata " + self.url_issues)

    f = urllib.urlopen(self.url_issues)
    ticketTotal = json.loads(f.read())

    total_issues = int(ticketTotal['count'])
    total_pages = total_issues / issues_per_query
    print("Number of tickets: " + str(total_issues))

    if total_issues == 0:
        printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)

    remaining = total_issues
    print "ETA ", (total_issues * Config.delay) / 60, "m (", (total_issues * Config.delay) / (60 * 60), "h)"

    while start_page <= total_pages:
        self.url_issues = Config.url + "/search/?limit=" + str(issues_per_query)
        self.url_issues += "&page=" + str(start_page) + "&q="
        # A time range with all the tickets
        self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]")
        # Order by mod_date_dt asc
        self.url_issues += "&sort=mod_date_dt+asc"
        printdbg("URL for next issues " + self.url_issues)

        f = urllib.urlopen(self.url_issues)
        ticketList = json.loads(f.read())

        bugs = []
        for ticket in ticketList["tickets"]:
            bugs.append(ticket["ticket_num"])

        for bug in bugs:
            try:
                issue_url = Config.url + "/" + str(bug)
                issue_data = self.analyze_bug(issue_url)
                if issue_data is None:
                    continue
                bugsdb.insert_issue(issue_data, dbtrk.id)
                remaining -= 1
                print "Remaining time: ", remaining * Config.delay / 60, "m"
                time.sleep(self.delay)
            except UnicodeEncodeError:
                # must come before the generic handler or it is never reached
                printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except Exception, e:
                printerr("Error in function analyze_bug " + issue_url)
                traceback.print_exc(file=sys.stdout)

        # move on to the next results page; otherwise the loop never ends
        start_page += 1
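# A minimal sketch of the Solr-style time-window query quoted above. The
# tracker URL and dates in the usage note are hypothetical; urllib.quote()
# escapes the spaces, colons and brackets so the range survives as a single
# query parameter.
import urllib

def example_allura_search_url(base_url, time_window, limit=500, page=0):
    url = base_url + "/search/?limit=" + str(limit)
    url += "&page=" + str(page) + "&q="
    url += urllib.quote("mod_date_dt:[" + time_window + "]")
    url += "&sort=mod_date_dt+asc"
    return url

# example_allura_search_url("http://sourceforge.net/rest/p/allura/tickets",
#                           "1900-01-01T00:00:00Z TO 2013-01-01T00:00:00Z")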
def run(self):
    print("Running Bicho with delay of %s seconds" % (str(self.delay)))

    url = self.url
    pname = self.__get_project_from_url()

    bugsdb = get_database(DBLaunchpadBackend())
    printdbg(url)

    # launchpad needs a temp directory to store cached data
    homedir = pwd.getpwuid(os.getuid()).pw_dir
    cachedir = os.path.join(homedir, ".cache/bicho/")
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    cre_file = os.path.join(cachedir, 'launchpad-credential')

    self.lp = Launchpad.login_with('Bicho', 'production',
                                   credentials_file=cre_file)

    aux_status = ["New", "Incomplete", "Opinion", "Invalid", "Won't Fix",
                  "Expired", "Confirmed", "Triaged", "In Progress",
                  "Fix Committed", "Fix Released",
                  "Incomplete (with response)",
                  "Incomplete (without response)"]

    last_mod_date = bugsdb.get_last_modification_date()

    if last_mod_date:
        bugs = self.lp.projects[pname].searchTasks(status=aux_status,
                                                   omit_duplicates=False,
                                                   order_by='date_last_updated',
                                                   modified_since=last_mod_date)
    else:
        bugs = self.lp.projects[pname].searchTasks(status=aux_status,
                                                   omit_duplicates=False,
                                                   order_by='date_last_updated')
    printdbg("Last bug already cached: %s" % last_mod_date)

    nbugs = len(bugs)

    # still useless
    bugsdb.insert_supported_traker("launchpad", "x.x")
    trk = Tracker(url, "launchpad", "x.x")
    dbtrk = bugsdb.insert_tracker(trk)
    #

    if nbugs == 0:
        printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)

    analyzed = []

    for bug in bugs:
        if bug.web_link in analyzed:
            continue  # for the bizarre error #338

        try:
            issue_data = self.analyze_bug(bug)
        except Exception:
            #FIXME it does not handle the e
            printerr("Error in function analyzeBug with URL: %s and Bug: %s"
                     % (url, bug))
            raise

        try:
            # we can have meta-trackers but we want to have the original
            # tracker name
            tr_url = self.__get_tracker_url_from_bug(bug)
            if (tr_url != url):
                aux_trk = Tracker(tr_url, "launchpad", "x.x")
                dbtrk = bugsdb.insert_tracker(aux_trk)
            bugsdb.insert_issue(issue_data, dbtrk.id)
        except UnicodeEncodeError:
            printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                     % (issue_data.issue))
        except NotFoundError:
            printerr("NotFoundError: the issue %s couldn't be stored"
                     % (issue_data.issue))
        except Exception, e:
            printerr("Unexpected Error: the issue %s couldn't be stored"
                     % (issue_data.issue))
            print e

        analyzed.append(bug.web_link)  # for the bizarre error #338
        time.sleep(self.delay)
def run(self):
    print("Running Bicho with delay of %s seconds" % (str(self.delay)))

    bugsdb = get_database(DBGithubBackend())

    url = self.url
    pname = self.__get_project_from_url()
    printdbg(url)

    self.bugs_state = "open"
    self.pagecont = 1
    self.mod_date_open = None
    self.mod_date_closed = None

    ## FIXME tracker must be also checked!!!
    aux_date_open = bugsdb.get_last_modification_date(state="open")
    if aux_date_open:
        self.mod_date_open = aux_date_open.isoformat()
    aux_date_closed = bugsdb.get_last_modification_date(state="closed")
    if aux_date_closed:
        self.mod_date_closed = aux_date_closed.isoformat()

    printdbg("Last open bug already cached: %s" % self.mod_date_open)
    printdbg("Last closed bug already cached: %s" % self.mod_date_closed)

    bugs = self.__get_batch_bugs()
    nbugs = len(bugs)

    # still useless
    bugsdb.insert_supported_traker("github", "v3")
    trk = Tracker(url, "github", "v3")
    dbtrk = bugsdb.insert_tracker(trk)
    #

    if len(bugs) == 0:
        if aux_date_open or aux_date_closed:
            printout("Bicho database up to date")
        else:
            printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)

    auxcont = 0
    while len(bugs) > 0:
        for bug in bugs:
            try:
                issue_data = self.analyze_bug(bug)
            except Exception:
                #FIXME it does not handle the e
                printerr("Error in function analyzeBug with URL: %s and Bug: %s"
                         % (url, bug))
                raise

            try:
                # we can have meta-trackers but we want to have the
                # original tracker name
                tr_url = self.__get_tracker_url_from_bug(bug)
                if (tr_url != url):
                    aux_trk = Tracker(tr_url, "github", "v3")
                    dbtrk = bugsdb.insert_tracker(aux_trk)
                bugsdb.insert_issue(issue_data, dbtrk.id)
            except UnicodeEncodeError:
                printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except Exception, e:
                printerr("ERROR: ")
                print e

            time.sleep(self.delay)

        self.pagecont += 1
        bugs = self.__get_batch_bugs()
        nbugs = nbugs + len(bugs)
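# A minimal sketch of the batch loop above: pages of issues are requested
# one by one until an empty batch comes back. fetch_page is a stand-in for
# self.__get_batch_bugs(), and the per-bug analysis is elided.
import time

def example_fetch_all_pages(fetch_page, delay=1):
    pagecont = 1
    bugs = fetch_page(pagecont)
    nbugs = len(bugs)
    while len(bugs) > 0:
        for bug in bugs:
            # analyze and store the bug here
            time.sleep(delay)
        pagecont += 1
        bugs = fetch_page(pagecont)
        nbugs += len(bugs)
    return nbugs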
def run(self, url):
    print("Running Bicho with delay of %s seconds" % (str(self.delay)))

    # retrieving data in csv format
    if not self.url:
        self.url = url
    bugsdb = get_database(DBBugzillaBackend())
    url = self.url + "&ctype=csv"
    printdbg(url)

    # The url is a bug
    if url.find("show_bug.cgi") > 0:
        bugs = []
        bugs.append(self.url.split("show_bug.cgi?id=")[1])
    else:
        f = urllib.urlopen(url)

        # Problems using csv library, not all the fields are delimited by
        # '"' character. Easier using split.
        bugList_csv = f.read().split('\n')
        bugs = []
        # Ignoring first row
        for bug_csv in bugList_csv[1:]:
            # First field is the id field, necessary to later create the url
            # to retrieve bug information
            bugs.append(bug_csv.split(',')[0])

    nbugs = len(bugs)

    url = self.url
    url = self.get_domain(url)
    if url.find("apache") > 0:
        url = url + "bugzilla/"

    # still useless
    bugsdb.insert_supported_traker("bugzilla", "3.2.3")
    trk = Tracker(url, "bugzilla", "3.2.3")
    dbtrk = bugsdb.insert_tracker(trk)

    if nbugs == 0:
        printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)

    for bug in bugs:
        # The URL from bugzilla (so far KDE and GNOME) are like:
        # http://<domain>/show_bug.cgi?id=<bugid>&ctype=xml
        try:
            issue_data = self.analyze_bug(bug, url)
        except Exception:
            #FIXME it does not handle the e
            printerr("Error in function analyzeBug with URL: %s and Bug: %s"
                     % (url, bug))
            #print e
            #continue
            raise

        try:
            bugsdb.insert_issue(issue_data, dbtrk.id)
        except UnicodeEncodeError:
            printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                     % (issue_data.issue))

        time.sleep(self.delay)

    printout("Done. %s bugs analyzed" % (nbugs))
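# A minimal sketch of the id extraction above: the Bugzilla buglist is
# fetched as CSV (&ctype=csv), the header row is skipped, and the first
# column of each row is the bug id. The sample data is hypothetical; the
# empty-line guard is an addition for the standalone sketch.
def example_ids_from_csv(buglist_csv):
    bugs = []
    for bug_csv in buglist_csv.split('\n')[1:]:  # skip the header row
        if bug_csv:
            bugs.append(bug_csv.split(',')[0])
    return bugs

# example_ids_from_csv("bug_id,product,summary\n101,core,crash\n102,ui,typo")
# returns ['101', '102']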