def run(self): printout("Running Bicho with delay of %s seconds" % (str(self.delay))) issues_per_xml_query = 500 bugsdb = get_database(DBJiraBackend()) bugsdb.insert_supported_traker("jira","4.1.2") trk = Tracker(self.url.split("-")[0], "jira", "4.1.2") dbtrk = bugsdb.insert_tracker(trk) serverUrl = self.url.split("/browse/")[0] query = "/si/jira.issueviews:issue-xml/" project = self.url.split("/browse/")[1] if (project.split("-").__len__() > 1): bug_key = project project = project.split("-")[0] bugs_number = 1 printdbg(serverUrl + query + bug_key + "/" + bug_key + ".xml") parser = xml.sax.make_parser( ) handler = BugsHandler( ) parser.setContentHandler(handler) try: parser.parse(serverUrl + query + bug_key + "/" + bug_key + ".xml") issue = handler.getIssues()[0] bugsdb.insert_issue(issue, dbtrk.id) except Exception, e: #printerr(e) print(e)
def _process_issues(self):
    """Fetch issues either for a single-issue URL or, round by round,
    for a whole tracker (bounded by max_rounds)."""
    if self._is_issue_url(self.url):
        # FIXME: this only works for one issue, if more id parameters
        # are set, those issues will not be processed
        ids = [self.url.split("show_bug.cgi?id=")[1]]
        printdbg("Issue #%s URL found" % ids[0])
        url = self._get_domain(self.url)
        self._retrieve_issues(ids, url, self.tracker.id)
        return

    url = self._get_domain(self.url)
    rounds = 0
    max_rounds = 50  # 50*10000

    # Some bugzillas limit the number of results that a query can return.
    # Due to this, bicho will search for new issues/changes until find
    # no one new.
    last_date, next_date = self._get_last_and_next_dates()
    ids = self._retrieve_issues_ids(self.url, self.version, next_date)

    while ids and rounds < max_rounds:
        printout("Round #%d - Total issues to retrieve: %d" % (rounds, len(ids)))
        self._retrieve_issues(ids, url, self.tracker.id)
        rounds += 1

        # Search new ids, but first, we have to check whether they are
        # already stored or not
        last_date, next_date = self._get_last_and_next_dates()
        ids = self._retrieve_issues_ids(self.url, self.version, last_date)

        # If there aren't new issues from the same date, ask for a new one
        if not ids:
            printdbg("No issues found for date %s. Trying with %s"
                     % (last_date, next_date))
            ids = self._retrieve_issues_ids(self.url, self.version, next_date)

    if rounds > 0:
        printout("No more issues to retrieve")
def _login(self):
    """Authenticate against a Bugzilla tracker.

    Posts the configured user/password to the tracker's login URL and
    stores the resulting session cookies in ``self.cookies`` so later
    requests are authenticated.  Does nothing when no credentials were
    provided.
    """
    if not (self.backend_user and self.backend_password):
        printdbg("No account data provided. Not logged in bugzilla")
        return

    import cookielib

    cookie_j = cookielib.CookieJar()
    cookie_h = urllib2.HTTPCookieProcessor(cookie_j)

    url = self._get_login_url(self.url)
    values = {'Bugzilla_login': self.backend_user,
              'Bugzilla_password': self.backend_password}

    # Install a global opener so every subsequent urllib2 request
    # carries the session cookies.
    opener = urllib2.build_opener(cookie_h)
    urllib2.install_opener(opener)

    data = urllib.urlencode(values)
    request = urllib2.Request(url, data)
    urllib2.urlopen(request)

    # FIX: the original iterated with enumerate() but never used the
    # index; iterate the jar directly.
    for c in cookie_j:
        self.cookies[c.name] = c.value

    printout("Logged in bugzilla as %s" % self.backend_user)
    printdbg("Bugzilla session cookies: %s" % self.cookies)
def run(self): printout("Running Bicho with delay of %s seconds" % (str(self.delay))) issues_per_xml_query = 500 bugsdb = get_database(DBJiraBackend()) bugsdb.insert_supported_traker("jira", "4.1.2") trk = Tracker(self.url.split("-")[0], "jira", "4.1.2") dbtrk = bugsdb.insert_tracker(trk) serverUrl = self.url.split("/browse/")[0] query = "/si/jira.issueviews:issue-xml/" project = self.url.split("/browse/")[1] if (project.split("-").__len__() > 1): bug_key = project project = project.split("-")[0] bugs_number = 1 printdbg(serverUrl + query + bug_key + "/" + bug_key + ".xml") parser = xml.sax.make_parser() handler = BugsHandler() parser.setContentHandler(handler) try: parser.parse(serverUrl + query + bug_key + "/" + bug_key + ".xml") issue = handler.getIssues()[0] bugsdb.insert_issue(issue, dbtrk.id) except Exception, e: #printerr(e) print(e)
def run(self): """ """ printout("Running Bicho with delay of %s seconds" % (str(self.delay))) issues_per_query = 250 start_issue=1 bugs = []; bugsdb = get_database (DBGoogleCodeBackend()) # still useless bugsdb.insert_supported_traker("googlecode", "beta") trk = Tracker (Config.url, "googlecode", "beta") dbtrk = bugsdb.insert_tracker(trk) self.url = Config.url # https://code.google.com/feeds/issues/p/mobile-time-care self.url_issues = Config.url + "/issues/full?max-results=1" printdbg("URL for getting metadata " + self.url_issues) d = feedparser.parse(self.url_issues) total_issues = int(d['feed']['opensearch_totalresults']) print "Total bugs: ", total_issues if total_issues == 0: printout("No bugs found. Did you provide the correct url?") sys.exit(0) remaining = total_issues print "ETA ", (total_issues*Config.delay)/(60), "m (", (total_issues*Config.delay)/(60*60), "h)" while start_issue < total_issues: self.url_issues = Config.url + "/issues/full?max-results=" + str(issues_per_query) self.url_issues += "&start-index=" + str(start_issue) printdbg("URL for next issues " + self.url_issues) d = feedparser.parse(self.url_issues) for entry in d['entries']: try: issue = self.analyze_bug(entry) if issue is None: continue bugsdb.insert_issue(issue, dbtrk.id) remaining -= 1 print "Remaining time: ", (remaining)*Config.delay/60, "m", " issues ", str(remaining) time.sleep(Config.delay) except Exception, e: printerr("Error in function analyze_bug ") pprint.pprint(entry) traceback.print_exc(file=sys.stdout) except UnicodeEncodeError: printerr("UnicodeEncodeError: the issue %s couldn't be stored" % (issue.issue))
def run(self):
    """Walk a Redmine tracker page by page via issues.json and store
    every ticket found."""
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    # redmine 1.0 support
    last_page = 1
    tickets_page = 25  # fixed redmine

    bugs = []
    bugsdb = get_database(DBRedmineBackend())

    # still useless in redmine
    bugsdb.insert_supported_traker("redmine", "beta")
    trk = Tracker(Config.url, "redmine", "beta")
    dbtrk = bugsdb.insert_tracker(trk)

    self.url_issues = (Config.url
                       + "issues.json?status_id=*&sort=updated_on&page="
                       + str(last_page))
    request = urllib2.Request(self.url_issues)
    if self.backend_user:
        base64string = base64.encodestring(
            '%s:%s' % (Config.backend_user,
                       Config.backend_password)).replace('\n', '')
        request.add_header("Authorization", "Basic %s" % base64string)

    # Get statuses
    self._get_statuses()

    f = urllib2.urlopen(request)
    tickets = json.loads(f.read())
    for ticket in tickets["issues"]:
        bugsdb.insert_issue(self.analyze_bug(ticket), dbtrk.id)

    # Remember the newest ticket id so we can tell when a later page
    # wraps around to already-seen data.
    last_ticket = tickets["issues"][0]['id']

    while True:
        last_page += 1
        self.url_issues = (Config.url
                           + "issues.json?status_id=*&sort=updated_on&page="
                           + str(last_page))
        request = urllib2.Request(self.url_issues)
        f = urllib2.urlopen(request)
        tickets = json.loads(f.read())

        if len(tickets['issues']) == 0:
            break

        pprint.pprint("Tickets read: " + str(tickets["issues"][0]['id'])
                      + " " + str(tickets["issues"][-1]['id']))

        if tickets["issues"][0]['id'] == last_ticket:
            break

        for ticket in tickets["issues"]:
            bugsdb.insert_issue(self.analyze_bug(ticket), dbtrk.id)

    pprint.pprint("Total pages: " + str(last_page))
    printout("Done. Bugs analyzed:" + str(last_page * tickets_page))
def run(self):
    """Walk a Redmine tracker page by page and store every ticket.

    Handles base URLs that already carry query parameters (contain '?').
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    # redmine 1.0 support
    last_page = 1
    tickets_page = 25  # fixed redmine

    bugs = []
    bugsdb = get_database(DBRedmineBackend())

    # still useless in redmine
    bugsdb.insert_supported_traker("redmine", "beta")
    trk = Tracker(Config.url, "redmine", "beta")
    dbtrk = bugsdb.insert_tracker(trk)

    if Config.url.find('?') > 0:
        self.url_issues = Config.url + "&status_id=*&sort=updated_on&page=" + str(last_page)
    else:
        self.url_issues = Config.url + "?status_id=*&sort=updated_on&page=" + str(last_page)

    request = urllib2.Request(self.url_issues)
    if self.backend_user:
        base64string = base64.encodestring(
            '%s:%s' % (Config.backend_user,
                       Config.backend_password)).replace('\n', '')
        request.add_header("Authorization", "Basic %s" % base64string)
    f = urllib2.urlopen(request)
    tickets = json.loads(f.read())
    for ticket in tickets["issues"]:
        issue = self.analyze_bug(ticket)
        bugsdb.insert_issue(issue, dbtrk.id)

    # Newest ticket id; pagination stops when a page repeats it.
    last_ticket = tickets["issues"][0]['id']

    while True:
        last_page += 1
        if Config.url.find('?') > 0:
            self.url_issues = Config.url + "&status_id=*&sort=updated_on&page=" + str(last_page)
        else:
            self.url_issues = Config.url + "?status_id=*&sort=updated_on&page=" + str(last_page)
        request = urllib2.Request(self.url_issues)
        f = urllib2.urlopen(request)
        tickets = json.loads(f.read())

        # FIX: guard against an empty page before indexing issues[0];
        # the sibling implementation already breaks here, this variant
        # raised IndexError once the tracker ran out of pages.
        if len(tickets['issues']) == 0:
            break

        pprint.pprint("Tickets read: " + str(tickets["issues"][0]['id'])
                      + " " + str(tickets["issues"][-1]['id']))

        if tickets["issues"][0]['id'] == last_ticket:
            break

        for ticket in tickets["issues"]:
            issue = self.analyze_bug(ticket)
            bugsdb.insert_issue(issue, dbtrk.id)

    pprint.pprint("Total pages: " + str(last_page))
    printout("Done. Bugs analyzed:" + str(last_page * tickets_page))
def __init__(self):
    """Read the crawl delay and optional credentials from Config."""
    self.delay = Config.delay
    try:
        self.backend_user = Config.backend_user
        self.backend_password = Config.backend_password
    except AttributeError:
        # Credentials are optional; fall back to anonymous access.
        printout("No account provided.")
        self.backend_user = None
        self.backend_password = None
def run(self):
    """ """
    # Crawl Gerrit reviews in lastUpdated order, one page of `limit`
    # results at a time, and store each review as an issue.  Stops when
    # a page is short (no more data) or when reviews older than the
    # last stored modification date are reached.
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    bugs = []
    bugsdb = get_database(DBGerritBackend())

    # still useless in gerrit
    bugsdb.insert_supported_traker("gerrit", "beta")
    trk = Tracker(Config.url + "_" + Config.gerrit_project, "gerrit", "beta")
    dbtrk = bugsdb.insert_tracker(trk)

    last_mod_time = 0
    last_mod_date = bugsdb.get_last_modification_date(dbtrk.id)
    if last_mod_date:
        printdbg("Last reviews analyzed were modified on date: %s"
                 % last_mod_date)
        last_mod_time = time.mktime(time.strptime(
            last_mod_date, '%Y-%m-%d %H:%M:%S'))

    limit = 500  # gerrit default 500
    last_item = ""
    # last_item = "001f672c00002f80";
    number_results = limit
    total_reviews = 0

    # A full page means there may be more data; wikimedia gerrit
    # returns limit+1 entries per page, hence the second condition.
    while (number_results == limit or
           number_results == limit + 1):
        # ordered by lastUpdated
        tickets = self.getReviews(limit, last_item)
        number_results = 0

        reviews = []
        for entry in tickets:
            if 'project' in entry.keys():
                # Entries older than the cached date were already
                # stored in a previous run: stop this page early.
                if (entry['lastUpdated'] < last_mod_time):
                    break
                reviews.append(entry["number"])
                review_data = self.analyze_review(entry)
                # sortKey is the resume cursor for the next page.
                last_item = entry['sortKey']
                # extra changes not included in gerrit changes
                # self.add_merged_abandoned_changes_from_comments(entry, review_data)
                self.add_merged_abandoned_changes(entry, review_data)
                self.add_new_change(review_data)
                bugsdb.insert_issue(review_data, dbtrk.id)
                number_results += 1
            elif 'rowCount' in entry.keys():
                # Trailer entry with result statistics.
                pprint.pprint(entry)
        printdbg("CONTINUE FROM: " + last_item)
        total_reviews = total_reviews + int(number_results)

    self.check_merged_abandoned_changes(bugsdb.store, dbtrk.id)
    print("Done. Number of reviews: " + str(total_reviews))
def __init__(self):
    """Set up a Bugzilla backend from the global Config."""
    self.url = self._healthy_url(Config.url)
    self.delay = Config.delay
    self.cookies = {}
    self.version = None
    self.tracker = None
    self.retrieved = {}  # retrieved issues on this run

    try:
        self.backend_password = Config.backend_password
        self.backend_user = Config.backend_user
    except AttributeError:
        # Anonymous access: bugzilla hides mail addresses from
        # unauthenticated clients.
        printout("No bugzilla account provided, mail addresses won't "
                 "be retrieved")
        self.backend_password = None
        self.backend_user = None

    self.bugsdb = get_database(DBBugzillaBackend())
def run(self, url):
    """Crawl a SourceForge tracker (or a single issue when the URL
    carries an ``aid=`` parameter) and store every bug found.

    :param url: tracker or single-issue URL
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))
    self.url = url

    ids = []
    self.parser = SourceForgeParser()

    # first we take the bugs ids
    if url.find("aid=") > 0:
        aux = url.split("aid=")[1].split("&")[0]
        ids.append(aux)
    else:
        ids = self.__get_issues_list(self.url)

    self.__check_tracker_url(self.url)

    # order the parameters in the url to add the same tracker url
    # to data base without aid parameter
    self.__order_query(self.url)

    self.db = get_database(DBSourceForgeBackend())
    self.db.insert_supported_traker(SUPPORTED_SF_TRACKERS[0],
                                    SUPPORTED_SF_TRACKERS[1])
    self.__insert_tracker(self.url)

    # FIX: this emptiness check appeared twice verbatim; the duplicate
    # was removed.
    nbugs = len(ids)
    if nbugs == 0:
        printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)

    for id in ids:
        url = self.url + "&func=detail&aid=%s" % id  # FIXME:urls!!!
        printdbg(url)
        issue = self.__get_issue(url)
        self.__insert_issue(issue)
        time.sleep(self.delay)

    printout("Done. %s bugs analyzed" % (nbugs))
def run(self):
    """Drive a full Bugzilla crawl: login, detect the server version,
    register the tracker and process its issues."""
    printout("Running Bicho with delay of %s seconds" % str(self.delay))

    self._login()
    self._set_version()
    self._set_tracker()
    self._process_issues()

    if self.retrieved:
        printout("Done. %d issues retrieved" % len(self.retrieved))
    else:
        printout("No issues found. Did you provide the correct url?")
def run(self):
    """Crawl the configured SourceForge tracker (``self.url``) and
    store every bug found.
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

    ids = []
    self.parser = SourceForgeParser()

    # first we take the bugs ids
    # FIX: this method takes no `url` argument; the original referenced
    # an undefined name `url` here (NameError) — use self.url.
    if self.url.find("aid=") > 0:
        aux = self.url.split("aid=")[1].split("&")[0]
        ids.append(aux)
    else:
        ids = self.__get_issues_list(self.url)

    self.__check_tracker_url(self.url)

    # order the parameters in the url to add the same tracker url
    # to data base without aid parameter
    self.__order_query(self.url)

    self.db = get_database(DBSourceForgeBackend())
    self.db.insert_supported_traker(SUPPORTED_SF_TRACKERS[0],
                                    SUPPORTED_SF_TRACKERS[1])
    self.__insert_tracker(self.url)

    nbugs = len(ids)
    if nbugs == 0:
        printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)

    for id in ids:
        url = self.url + '&func=detail&aid=%s' % id  # FIXME:urls!!!
        printdbg(url)
        issue = self.__get_issue(url)
        self.__insert_issue(issue)
        time.sleep(self.delay)

    printout("Done. %s bugs analyzed" % (nbugs))
def run(self): """ """ printout("Running Bicho with delay of %s seconds" % (str(self.delay))) issues_per_query = 250 start_issue = 1 bugs = [] bugsdb = get_database(DBGoogleCodeBackend()) # still useless bugsdb.insert_supported_traker("googlecode", "beta") trk = Tracker(Config.url, "googlecode", "beta") dbtrk = bugsdb.insert_tracker(trk) self.url = Config.url # https://code.google.com/feeds/issues/p/mobile-time-care self.url_issues = Config.url + "/issues/full?max-results=1" printdbg("URL for getting metadata " + self.url_issues) d = feedparser.parse(self.url_issues) total_issues = int(d['feed']['opensearch_totalresults']) print "Total bugs: ", total_issues if total_issues == 0: printout("No bugs found. Did you provide the correct url?") sys.exit(0) remaining = total_issues print "ETA ", (total_issues * Config.delay) / (60), "m (", ( total_issues * Config.delay) / (60 * 60), "h)" while start_issue < total_issues: self.url_issues = Config.url + "/issues/full?max-results=" + str( issues_per_query) self.url_issues += "&start-index=" + str(start_issue) printdbg("URL for next issues " + self.url_issues) d = feedparser.parse(self.url_issues) for entry in d['entries']: try: issue = self.analyze_bug(entry) if issue is None: continue bugsdb.insert_issue(issue, dbtrk.id) remaining -= 1 print "Remaining time: ", ( remaining) * Config.delay / 60, "m", " issues ", str( remaining) time.sleep(Config.delay) except Exception, e: printerr("Error in function analyze_bug ") pprint.pprint(entry) traceback.print_exc(file=sys.stdout) except UnicodeEncodeError: printerr( "UnicodeEncodeError: the issue %s couldn't be stored" % (issue.issue))
def run(self):
    # Crawl Launchpad bugs for the project named in self.url, newest
    # changes only when a cached modification date exists.
    # NOTE(review): this chunk ends after the per-bug loop with no
    # final summary; it appears to be a truncated copy of the full
    # implementation.
    print("Running Bicho with delay of %s seconds" % (str(self.delay)))

    url = self.url
    pname = None
    pname = self.__get_project_from_url()

    bugsdb = get_database(DBLaunchpadBackend())
    printdbg(url)

    # launchpad needs a temp directory to store cached data
    homedir = pwd.getpwuid(os.getuid()).pw_dir
    cachedir = os.path.join(homedir, ".cache/bicho/")
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    cre_file = os.path.join(cachedir + 'launchpad-credential')
    self.lp = Launchpad.login_with('Bicho', 'production',
                                   credentials_file=cre_file)

    # Every task status; needed so closed bugs are retrieved too.
    aux_status = [
        "New", "Incomplete", "Opinion", "Invalid", "Won't Fix",
        "Expired", "Confirmed", "Triaged", "In Progress",
        "Fix Committed", "Fix Released",
        "Incomplete (with response)", "Incomplete (without response)"
    ]
    last_mod_date = bugsdb.get_last_modification_date()
    if last_mod_date:
        bugs = self.lp.projects[pname].searchTasks(
            status=aux_status,
            omit_duplicates=False,
            order_by='date_last_updated',
            modified_since=last_mod_date)
    else:
        bugs = self.lp.projects[pname].searchTasks(
            status=aux_status,
            omit_duplicates=False,
            order_by='date_last_updated')
    printdbg("Last bug already cached: %s" % last_mod_date)
    nbugs = len(bugs)

    # still useless
    bugsdb.insert_supported_traker("launchpad", "x.x")
    trk = Tracker(url, "launchpad", "x.x")
    dbtrk = bugsdb.insert_tracker(trk)
    #
    if nbugs == 0:
        printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)

    analyzed = []
    for bug in bugs:
        if bug.web_link in analyzed:
            continue  # for the bizarre error #338
        try:
            issue_data = self.analyze_bug(bug)
        except Exception:
            #FIXME it does not handle the e
            printerr("Error in function analyzeBug with URL: ' \
                '%s and Bug: %s" % (url, bug))
            raise

        try:
            # we can have meta-trackers but we want to have the original
            #tracker name
            tr_url = self.__get_tracker_url_from_bug(bug)
            if (tr_url != url):
                aux_trk = Tracker(tr_url, "launchpad", "x.x")
                dbtrk = bugsdb.insert_tracker(aux_trk)
            bugsdb.insert_issue(issue_data, dbtrk.id)
        except UnicodeEncodeError:
            printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                     % (issue_data.issue))
        except NotFoundError:
            printerr("NotFoundError: the issue %s couldn't be stored"
                     % (issue_data.issue))
        except Exception, e:
            printerr("Unexpected Error: the issue %s couldn't be stored"
                     % (issue_data.issue))
            print e

        analyzed.append(bug.web_link)  # for the bizarre error #338
        time.sleep(self.delay)
# NOTE(review): fragment — the tail of JiraBackend.run(); its `def`
# header and the opening of the try/if are not part of this chunk.
# Indentation reconstructed to the apparent nesting.
        try:
            parser.parse(serverUrl + query + bug_key + "/" + bug_key + ".xml")
            issue = handler.getIssues()[0]
            bugsdb.insert_issue(issue, dbtrk.id)
        except Exception, e:
            #printerr(e)
            print(e)
    else:
        # Whole-project crawl: fetch issues in batches until none remain.
        self.last_mod_date = bugsdb.get_last_modification_date(dbtrk.id)
        if self.last_mod_date:
            # self.url = self.url + "&updated:after=" + last_mod_date
            printdbg("Last bugs cached were modified at: %s"
                     % self.last_mod_date)
        bugs_number = self.bugsNumber(self.url)
        print "Tickets to be retrieved:", str(bugs_number)
        remaining = bugs_number
        while (remaining > 0):
            self.analyze_bug_list(issues_per_xml_query,
                                  bugs_number - remaining, bugsdb, dbtrk.id)
            remaining -= issues_per_xml_query
            #print "Remaining time: ", (remaining/issues_per_xml_query)*Config.delay/60, "m", "(",remaining,")"
            time.sleep(self.delay)
        printout("Done. %s bugs analyzed" % (bugs_number))

# Register this backend under the "jira" name.
Backend.register_backend("jira", JiraBackend)
# NOTE(review): fragment — the tail of the Allura run() paging loop;
# its `def` header and the enclosing `while` are not part of this
# chunk.  Indentation reconstructed.
        f = urllib.urlopen(self.url_issues)
        ticketList = json.loads(f.read())

        bugs = []
        for ticket in ticketList["tickets"]:
            bugs.append(ticket["ticket_num"])

        for bug in bugs:
            try:
                issue_url = Config.url + "/" + str(bug)
                issue_data = self.analyze_bug(issue_url)
                if issue_data is None:
                    continue
                bugsdb.insert_issue(issue_data, dbtrk.id)
                remaining -= 1
                print "Remaining time: ", (remaining) * Config.delay / 60, "m"
                time.sleep(self.delay)
            except Exception, e:
                printerr("Error in function analyze_bug " + issue_url)
                traceback.print_exc(file=sys.stdout)
            # NOTE(review): unreachable — UnicodeEncodeError is an
            # Exception subclass, so the clause above always wins.
            except UnicodeEncodeError:
                printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                         % (issue_data.issue))
        start_page += 1

    printout("Done. Bugs analyzed:" + str(total_issues - remaining))

# Register this backend under the "allura" name.
Backend.register_backend('allura', Allura)
def run(self):
    # Crawl Github issues (API v3) in pages, resuming from the last
    # cached modification dates for open and closed states.
    # NOTE(review): this chunk ends after updating nbugs; a final
    # summary printout, if any, is not part of this fragment.
    print("Running Bicho with delay of %s seconds" % (str(self.delay)))

    bugsdb = get_database(DBGithubBackend())

    url = self.url
    pname = None
    pname = self.__get_project_from_url()

    printdbg(url)

    self.bugs_state = "open"
    self.pagecont = 1

    self.mod_date_open = None
    self.mod_date_closed = None

    ## FIXME tracker must be also checked!!!
    aux_date_open = bugsdb.get_last_modification_date(state="open")
    if aux_date_open:
        self.mod_date_open = aux_date_open.isoformat()
    aux_date_closed = bugsdb.get_last_modification_date(state="closed")
    if aux_date_closed:
        self.mod_date_closed = aux_date_closed.isoformat()

    printdbg("Last open bug already cached: %s" % self.mod_date_open)
    printdbg("Last closed bug already cached: %s" % self.mod_date_closed)

    bugs = self.__get_batch_bugs()
    nbugs = len(bugs)

    # still useless
    bugsdb.insert_supported_traker("github", "v3")
    trk = Tracker(url, "github", "v3")
    dbtrk = bugsdb.insert_tracker(trk)
    #
    if len(bugs) == 0:
        # An empty first batch with cached dates just means we are
        # already up to date.
        if aux_date_open or aux_date_closed:
            printout("Bicho database up to date")
        else:
            printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)

    auxcont = 0
    while len(bugs) > 0:
        for bug in bugs:
            try:
                issue_data = self.analyze_bug(bug)
            except Exception:
                #FIXME it does not handle the e
                printerr("Error in function analyzeBug with URL: ' \
                    '%s and Bug: %s" % (url, bug))
                raise

            try:
                # we can have meta-trackers but we want to have the
                # original tracker name
                tr_url = self.__get_tracker_url_from_bug(bug)
                if (tr_url != url):
                    aux_trk = Tracker(tr_url, "github", "v3")
                    dbtrk = bugsdb.insert_tracker(aux_trk)
                bugsdb.insert_issue(issue_data, dbtrk.id)
            except UnicodeEncodeError:
                printerr(
                    "UnicodeEncodeError: the issue %s couldn't be stored"
                    % (issue_data.issue))
            except Exception, e:
                printerr("ERROR: ")
                print e

            time.sleep(self.delay)

        self.pagecont += 1
        bugs = self.__get_batch_bugs()
        nbugs = nbugs + len(bugs)
# NOTE(review): fragment — the tail of JiraBackend.run() (duplicate of
# an earlier chunk); its `def` header and the enclosing `if` are not
# part of this chunk.  Indentation reconstructed.
        parser = xml.sax.make_parser( )
        handler = BugsHandler( )
        parser.setContentHandler(handler)
        try:
            parser.parse(serverUrl + query + bug_key + "/" + bug_key + ".xml")
            issue = handler.getIssues()[0]
            bugsdb.insert_issue(issue, dbtrk.id)
        except Exception, e:
            #printerr(e)
            print(e)
    else:
        # Whole-project crawl: fetch issues in batches until none remain.
        self.last_mod_date = bugsdb.get_last_modification_date(dbtrk.id)
        if self.last_mod_date:
            # self.url = self.url + "&updated:after=" + last_mod_date
            printdbg("Last bugs cached were modified at: %s"
                     % self.last_mod_date)
        bugs_number = self.bugsNumber(self.url)
        print "Tickets to be retrieved:", str(bugs_number)
        remaining = bugs_number
        while (remaining>0):
            self.analyze_bug_list(issues_per_xml_query,
                                  bugs_number-remaining, bugsdb, dbtrk.id)
            remaining -= issues_per_xml_query
            #print "Remaining time: ", (remaining/issues_per_xml_query)*Config.delay/60, "m", "(",remaining,")"
            time.sleep(self.delay)
        printout("Done. %s bugs analyzed" % (bugs_number))

# Register this backend under the "jira" name.
Backend.register_backend ("jira", JiraBackend)
def run(self):
    # Crawl Launchpad bugs for the project named in self.url
    # (duplicate of an earlier chunk of the same function).
    # NOTE(review): truncated — stops after the per-bug loop with no
    # final summary.
    print("Running Bicho with delay of %s seconds" % (str(self.delay)))

    url = self.url
    pname = None
    pname = self.__get_project_from_url()

    bugsdb = get_database(DBLaunchpadBackend())
    printdbg(url)

    # launchpad needs a temp directory to store cached data
    homedir = pwd.getpwuid(os.getuid()).pw_dir
    cachedir = os.path.join(homedir, ".cache/bicho/")
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    cre_file = os.path.join(cachedir + 'launchpad-credential')
    self.lp = Launchpad.login_with('Bicho', 'production',
                                   credentials_file=cre_file)

    # All task statuses so closed bugs are retrieved too.
    aux_status = ["New", "Incomplete", "Opinion", "Invalid", "Won't Fix",
                  "Expired", "Confirmed", "Triaged", "In Progress",
                  "Fix Committed", "Fix Released",
                  "Incomplete (with response)",
                  "Incomplete (without response)"]
    last_mod_date = bugsdb.get_last_modification_date()
    if last_mod_date:
        bugs = self.lp.projects[pname].searchTasks(status=aux_status,
                                                   omit_duplicates=False,
                                                   order_by='date_last_updated',
                                                   modified_since=last_mod_date)
    else:
        bugs = self.lp.projects[pname].searchTasks(status=aux_status,
                                                   omit_duplicates=False,
                                                   order_by='date_last_updated')
    printdbg("Last bug already cached: %s" % last_mod_date)
    nbugs = len(bugs)

    # still useless
    bugsdb.insert_supported_traker("launchpad", "x.x")
    trk = Tracker(url, "launchpad", "x.x")
    dbtrk = bugsdb.insert_tracker(trk)
    #
    if nbugs == 0:
        printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)

    analyzed = []
    for bug in bugs:
        if bug.web_link in analyzed:
            continue  # for the bizarre error #338
        try:
            issue_data = self.analyze_bug(bug)
        except Exception:
            #FIXME it does not handle the e
            printerr("Error in function analyzeBug with URL: ' \
                '%s and Bug: %s" % (url, bug))
            raise

        try:
            # we can have meta-trackers but we want to have the original
            #tracker name
            tr_url = self.__get_tracker_url_from_bug(bug)
            if (tr_url != url):
                aux_trk = Tracker(tr_url, "launchpad", "x.x")
                dbtrk = bugsdb.insert_tracker(aux_trk)
            bugsdb.insert_issue(issue_data, dbtrk.id)
        except UnicodeEncodeError:
            printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                     % (issue_data.issue))
        except NotFoundError:
            printerr("NotFoundError: the issue %s couldn't be stored"
                     % (issue_data.issue))
        except Exception, e:
            printerr("Unexpected Error: the issue %s couldn't be stored"
                     % (issue_data.issue))
            print e

        analyzed.append(bug.web_link)  # for the bizarre error #338
        time.sleep(self.delay)
class LPBackend(Backend):
    # Launchpad backend: maps Launchpad bug tasks onto Bicho issues,
    # including dates, milestone data, comments, activity and
    # attachments.

    def __init__(self):
        self.url = Config.url
        self.delay = Config.delay

    def get_domain(self, url):
        # Return "scheme://host/" for the given URL.
        strings = url.split('/')
        return strings[0] + "//" + strings[2] + "/"

    def _get_person(self, lpperson):
        """ Returns Bicho People object from Launchpad person object """
        p = People(lpperson.name)
        p.set_name(lpperson.display_name)
        # Only the first confirmed e-mail address is kept.
        if lpperson.confirmed_email_addresses:
            for m in lpperson.confirmed_email_addresses:
                p.set_email(m.email)
                break
        return p

    def analyze_bug(self, bug):
        #Retrieving main bug information
        ##
        ## all the retrieval can be improved. The method bug.lp_attributes
        ##offers a list of the available attributes for the object
        ##
        printdbg(bug.web_link + " updated at "
                 + bug.bug.date_last_updated.isoformat())

        # The issue id is the last path component of the web link.
        issue = bug.web_link[bug.web_link.rfind('/') + 1:]
        bug_type = bug.importance
        summary = bug.bug.title
        desc = bug.bug.description
        submitted_by = self._get_person(bug.owner)
        submitted_on = self.__drop_timezone(bug.date_created)

        if bug.assignee:
            assignee = self._get_person(bug.assignee)
        else:
            assignee = People("nobody")

        issue = LaunchpadIssue(issue, bug_type, summary, desc,
                               submitted_by, submitted_on)
        issue.set_assigned(assignee)
        issue.set_status(bug.status)
        issue.set_description(bug.bug.description)
        issue.set_web_link(bug.web_link)
        issue.set_target_display_name(bug.bug_target_display_name)
        issue.set_target_name(bug.bug_target_name)

        # Each date attribute may be missing on a given task, hence the
        # per-field try/except AttributeError guards.
        try:
            if bug.date_assigned:
                issue.set_date_assigned(self.__drop_timezone(
                    bug.date_assigned))
        except AttributeError:
            pass

        try:
            if bug.date_closed:
                issue.set_date_closed(self.__drop_timezone(bug.date_closed))
        except AttributeError:
            pass

        try:
            if bug.date_confirmed:
                issue.set_date_confirmed(
                    self.__drop_timezone(bug.date_confirmed))
        except AttributeError:
            pass

        try:
            if bug.date_created:
                issue.set_date_created(self.__drop_timezone(bug.date_created))
        except AttributeError:
            pass

        try:
            if bug.date_fix_committed:
                issue.set_date_fix_committed(
                    self.__drop_timezone(bug.date_fix_committed))
        except AttributeError:
            pass

        try:
            if bug.date_fix_released:
                issue.set_date_fix_released(
                    self.__drop_timezone(bug.date_fix_released))
        except AttributeError:
            pass

        try:
            if bug.date_in_progress:
                issue.set_date_in_progress(
                    self.__drop_timezone(bug.date_in_progress))
        except AttributeError:
            pass

        try:
            if bug.date_incomplete:
                issue.set_date_incomplete(
                    self.__drop_timezone(bug.date_incomplete))
        except AttributeError:
            pass

        try:
            if bug.date_left_closed:
                issue.set_date_left_closed(
                    self.__drop_timezone(bug.date_left_closed))
        except AttributeError:
            pass

        try:
            if bug.date_left_new:
                issue.set_date_left_new(self.__drop_timezone(
                    bug.date_left_new))
        except AttributeError:
            pass

        try:
            if bug.date_triaged:
                issue.set_date_triaged(self.__drop_timezone(bug.date_triaged))
        except AttributeError:
            pass

        try:
            if bug.date_last_message:
                issue.set_date_last_message(
                    self.__drop_timezone(bug.date_last_message))
        except AttributeError:
            pass

        try:
            if bug.bug.date_last_updated:
                issue.set_date_last_updated(
                    self.__drop_timezone(bug.bug.date_last_updated))
        except AttributeError:
            pass

        if bug.milestone:
            issue.set_milestone_code_name(bug.milestone.code_name)
            issue.set_milestone_data_targeted(bug.milestone.date_targeted)
            issue.set_milestone_name(bug.milestone.name)
            issue.set_milestone_summary(bug.milestone.summary)
            issue.set_milestone_title(bug.milestone.title)
            issue.set_milestone_web_link(bug.milestone.web_link)

        if bug.bug.duplicate_of:
            temp_rel = TempRelationship(bug.bug.id,
                                        unicode('duplicate_of'),
                                        unicode(bug.bug.duplicate_of.id))
            issue.add_temp_relationship(temp_rel)

        issue.set_heat(bug.bug.heat)
        issue.set_linked_branches(bug.bug.linked_branches)

        # storing the comments:
        # first message of the bugs contains the description
        if (bug.bug.messages and len(bug.bug.messages) > 1):
            skip = 1
            for c in bug.bug.messages:
                if (skip == 1):
                    # we skip the first comment which is the description
                    skip = 0
                    continue
                by = self._get_person(c.owner)
                com = Comment(c.content, by, c.date_created)
                issue.add_comment(com)

        issue.set_tags(bug.bug.tags)
        issue.set_title(bug.bug.title)
        issue.set_users_affected_count(bug.bug.users_affected_count)
        issue.set_web_link_standalone(bug.bug.web_link)

        # activity
        for entry in bug.bug.activity.entries:
            field = entry['whatchanged']
            removed = entry['oldvalue']
            added = entry['newvalue']
            by = self.__get_people_from_uri(entry['person_link'])
            date = self.__to_datetime(entry['datechanged'])
            change = Change(field, removed, added, by, date)
            issue.add_change(change)

        for a in bug.bug.attachments.entries:
            a_url = a['data_link']
            a_name = a['title']

            # author and date are stored in the comment object
            aux = a['message_link']
            comment_id = int(aux[aux.rfind('/') + 1:])
            comment = bug.bug.messages[comment_id]

            a_by = self._get_person(comment.owner)
            a_on = self.__drop_timezone(comment.date_created)

            #a_desc = a['']
            att = Attachment(a_url, a_by, a_on)
            att.set_name(a_name)
            #att.set_description()
            issue.add_attachment(att)

        return issue

    def __to_datetime(self, str):
        # converts str time to datetime
        return self.__drop_timezone(parse(str))

    def __drop_timezone(self, dt):
        # drop the timezone from the datetime objetct
        # MySQL doesn't support timezone, we remove it
        if dt.isoformat().rfind('+') > 0:
            aux = parse(dt.isoformat()[:dt.isoformat().rfind('+')])
            return aux
        else:
            return dt

    def _get_nickname_from_uri(self, uri):
        # "~nickname" is the last path component of a person link.
        aux = uri.rfind('~') + 1
        return uri[aux:]

    def __get_people_from_uri(self, uri):
        # returns People object from uri (person_link)
        try:
            people_lp = self.lp.people[self._get_nickname_from_uri(uri)]
            people_issue = People(people_lp.name)
            people_issue.set_name(people_lp.display_name)
        except KeyError:
            # user deleted from Launchpad!
            people_issue = People(self._get_nickname_from_uri(uri))
        return people_issue

    def __get_project_from_url(self):
        # Project name is the last path component of a launchpad.net URL.
        project_name = None
        url = self.url

        if url[-1] == '/':
            url = url[:-1]

        if (url.rfind('://bugs.launchpad.net') >= 0) or \
           (url.rfind('://launchpad.net') >= 0):
            project_name = url[url.rfind('/') + 1:]

        return project_name

    def __get_tracker_url_from_bug(self, bug):
        return bug.web_link[:bug.web_link.rfind('+bug') - 1]

    # NOTE(review): missing `self` — calling this as an instance method
    # would raise TypeError; it appears unused in this chunk.
    def __no_credential():
        print "Can't proceed without Launchpad credential."
        sys.exit()

    def run(self):
        # Main entry point: fetch every bug task of the project and
        # store it, resuming from the cached modification date.
        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        bugsdb = get_database(DBLaunchpadBackend())
        printdbg(url)

        # launchpad needs a temp directory to store cached data
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        cachedir = os.path.join(homedir, ".cache/bicho/")
        if not os.path.exists(cachedir):
            os.makedirs(cachedir)
        cre_file = os.path.join(cachedir + 'launchpad-credential')
        self.lp = Launchpad.login_with('Bicho', 'production',
                                       credentials_file=cre_file)

        aux_status = [
            "New", "Incomplete", "Opinion", "Invalid", "Won't Fix",
            "Expired", "Confirmed", "Triaged", "In Progress",
            "Fix Committed", "Fix Released",
            "Incomplete (with response)", "Incomplete (without response)"
        ]
        last_mod_date = bugsdb.get_last_modification_date()
        if last_mod_date:
            bugs = self.lp.projects[pname].searchTasks(
                status=aux_status,
                omit_duplicates=False,
                order_by='date_last_updated',
                modified_since=last_mod_date)
        else:
            bugs = self.lp.projects[pname].searchTasks(
                status=aux_status,
                omit_duplicates=False,
                order_by='date_last_updated')
        printdbg("Last bug already cached: %s" % last_mod_date)
        nbugs = len(bugs)

        # still useless
        bugsdb.insert_supported_traker("launchpad", "x.x")
        trk = Tracker(url, "launchpad", "x.x")
        dbtrk = bugsdb.insert_tracker(trk)
        #
        if nbugs == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        analyzed = []
        for bug in bugs:
            if bug.web_link in analyzed:
                continue  #for the bizarre error #338
            try:
                issue_data = self.analyze_bug(bug)
            except Exception:
                #FIXME it does not handle the e
                printerr("Error in function analyzeBug with URL: ' \
                    '%s and Bug: %s" % (url, bug))
                raise

            try:
                # we can have meta-trackers but we want to have the original
                #tracker name
                tr_url = self.__get_tracker_url_from_bug(bug)
                if (tr_url != url):
                    aux_trk = Tracker(tr_url, "launchpad", "x.x")
                    dbtrk = bugsdb.insert_tracker(aux_trk)
                bugsdb.insert_issue(issue_data, dbtrk.id)
            except UnicodeEncodeError:
                printerr(
                    "UnicodeEncodeError: the issue %s couldn't be stored"
                    % (issue_data.issue))
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except Exception, e:
                printerr("Unexpected Error: the issue %s couldn't be stored"
                         % (issue_data.issue))
                print e

            analyzed.append(bug.web_link)  # for the bizarre error #338
            time.sleep(self.delay)

        try:
            # we read the temporary table with the relationships and create
            # the final one
            bugsdb.store_final_relationships()
        except:
            raise

        printout("Done. %s bugs analyzed" % (nbugs))
# NOTE(review): fragment — the tail of the GoogleCode run() paging
# loop; its `def` header and the enclosing `while` are not part of
# this chunk.  Indentation reconstructed.
        printdbg("URL for next issues " + self.url_issues)

        d = feedparser.parse(self.url_issues)

        for entry in d['entries']:
            try:
                issue = self.analyze_bug(entry)
                if issue is None:
                    continue
                bugsdb.insert_issue(issue, dbtrk.id)
                remaining -= 1
                print "Remaining time: ", (
                    remaining) * Config.delay / 60, "m", " issues ", str(
                    remaining)
                time.sleep(Config.delay)
            except Exception, e:
                printerr("Error in function analyze_bug ")
                pprint.pprint(entry)
                traceback.print_exc(file=sys.stdout)
            # NOTE(review): unreachable — UnicodeEncodeError is an
            # Exception subclass, so the clause above always matches first.
            except UnicodeEncodeError:
                printerr(
                    "UnicodeEncodeError: the issue %s couldn't be stored"
                    % (issue.issue))

        start_issue += issues_per_query

    printout("Done. %s bugs analyzed" % (total_issues - remaining))

# Register this backend under the "googlecode" name.
Backend.register_backend('googlecode', GoogleCode)
def run(self): """ """ printout("Running Bicho with delay of %s seconds" % (str(self.delay))) # limit=-1 is NOT recognized as 'all'. 500 is a reasonable limit. - allura code issues_per_query = 500 start_page = 0 bugs = [] bugsdb = get_database(DBAlluraBackend()) # still useless in allura bugsdb.insert_supported_traker("allura", "beta") trk = Tracker(Config.url, "allura", "beta") dbtrk = bugsdb.insert_tracker(trk) last_mod_date = bugsdb.get_last_modification_date() # Date before the first ticket time_window_start = "1900-01-01T00:00:00Z" time_window_end = datetime.now().isoformat() + "Z" if last_mod_date: time_window_start = last_mod_date printdbg("Last bugs analyzed were modified on: %s" % last_mod_date) time_window = time_window_start + " TO " + time_window_end self.url_issues = Config.url + "/search/?limit=1" self.url_issues += "&q=" # A time range with all the tickets self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]") printdbg("URL for getting metadata " + self.url_issues) f = urllib.urlopen(self.url_issues) ticketTotal = json.loads(f.read()) total_issues = int(ticketTotal['count']) total_pages = total_issues / issues_per_query print("Number of tickets: " + str(total_issues)) if total_issues == 0: printout("No bugs found. 
Did you provide the correct url?") sys.exit(0) remaining = total_issues print "ETA ", (total_issues * Config.delay) / (60), "m (", (total_issues * Config.delay) / (60 * 60), "h)" while start_page <= total_pages: self.url_issues = Config.url + "/search/?limit=" + str(issues_per_query) self.url_issues += "&page=" + str(start_page) + "&q=" # A time range with all the tickets self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]") # Order by mod_date_dt desc self.url_issues += "&sort=mod_date_dt+asc" printdbg("URL for next issues " + self.url_issues) f = urllib.urlopen(self.url_issues) ticketList = json.loads(f.read()) bugs = [] for ticket in ticketList["tickets"]: bugs.append(ticket["ticket_num"]) for bug in bugs: try: issue_url = Config.url + "/" + str(bug) issue_data = self.analyze_bug(issue_url) if issue_data is None: continue bugsdb.insert_issue(issue_data, dbtrk.id) remaining -= 1 print "Remaining time: ", (remaining) * Config.delay / 60, "m" time.sleep(self.delay) except Exception, e: printerr("Error in function analyze_bug " + issue_url) traceback.print_exc(file=sys.stdout) except UnicodeEncodeError: printerr("UnicodeEncodeError: the issue %s couldn't be stored" % (issue_data.issue))
f = urllib.urlopen(self.url_issues) ticketList = json.loads(f.read()) bugs = [] for ticket in ticketList["tickets"]: bugs.append(ticket["ticket_num"]) for bug in bugs: try: issue_url = Config.url + "/" + str(bug) issue_data = self.analyze_bug(issue_url) if issue_data is None: continue bugsdb.insert_issue(issue_data, dbtrk.id) remaining -= 1 print "Remaining time: ", (remaining) * Config.delay / 60, "m" time.sleep(self.delay) except Exception, e: printerr("Error in function analyze_bug " + issue_url) traceback.print_exc(file=sys.stdout) except UnicodeEncodeError: printerr("UnicodeEncodeError: the issue %s couldn't be stored" % (issue_data.issue)) start_page += 1 printout("Done. Bugs analyzed:" + str(total_issues - remaining)) Backend.register_backend('allura', Allura)
def run (self, url): print("Running Bicho with delay of %s seconds" % (str(self.delay))) #retrieving data in csv format if not self.url: self.url = url bugsdb = get_database (DBBugzillaBackend()) url = self.url + "&ctype=csv" printdbg(url) #The url is a bug if url.find("show_bug.cgi")>0: bugs = [] bugs.append(self.url.split("show_bug.cgi?id=")[1]) else: f = urllib.urlopen(url) #Problems using csv library, not all the fields are delimited by # '"' character. Easier using split. bugList_csv = f.read().split('\n') bugs = [] #Ignoring first row for bug_csv in bugList_csv[1:]: #First field is the id field, necessary to later create the url #to retrieve bug information bugs.append(bug_csv.split(',')[0]) nbugs = len(bugs) nbugs = len(bugs) url = self.url url = self.get_domain(url) if url.find("apache")>0: url = url + "bugzilla/" # still useless bugsdb.insert_supported_traker("bugzilla", "3.2.3") trk = Tracker ( url, "bugzilla", "3.2.3") dbtrk = bugsdb.insert_tracker(trk) if nbugs == 0: printout("No bugs found. Did you provide the correct url?") sys.exit(0) for bug in bugs: #The URL from bugzilla (so far KDE and GNOME) are like: #http://<domain>/show_bug.cgi?id=<bugid>&ctype=xml try: issue_data = self.analyze_bug(bug, url) except Exception: #FIXME it does not handle the e printerr("Error in function analyzeBug with URL: %s and Bug: %s" % (url,bug)) #print e #continue raise try: bugsdb.insert_issue(issue_data, dbtrk.id) except UnicodeEncodeError: printerr("UnicodeEncodeError: the issue %s couldn't be stored" % (issue_data.issue)) time.sleep(self.delay) printout("Done. %s bugs analyzed" % (nbugs))
def run(self): """ """ printout("Running Bicho with delay of %s seconds" % (str(self.delay))) # limit=-1 is NOT recognized as 'all'. 500 is a reasonable limit. - allura code issues_per_query = 500 start_page=0 bugs = []; bugsdb = get_database (DBAlluraBackend()) # still useless in allura bugsdb.insert_supported_traker("allura", "beta") trk = Tracker (Config.url, "allura", "beta") dbtrk = bugsdb.insert_tracker(trk) last_mod_date = bugsdb.get_last_modification_date() # Date before the first ticket time_window_start = "1900-01-01T00:00:00Z" time_window_end = datetime.now().isoformat()+"Z" if last_mod_date: time_window_start = last_mod_date printdbg("Last bugs analyzed were modified on: %s" % last_mod_date) time_window = time_window_start + " TO " + time_window_end self.url_issues = Config.url + "/search/?limit=1" self.url_issues += "&q=" # A time range with all the tickets self.url_issues += urllib.quote("mod_date_dt:["+time_window+"]") printdbg("URL for getting metadata " + self.url_issues) f = urllib.urlopen(self.url_issues) ticketTotal = json.loads(f.read()) total_issues = int(ticketTotal['count']) total_pages = total_issues/issues_per_query print("Number of tickets: " + str(total_issues)) if total_issues == 0: printout("No bugs found. 
Did you provide the correct url?") sys.exit(0) remaining = total_issues print "ETA ", (total_issues*Config.delay)/(60), "m (", (total_issues*Config.delay)/(60*60), "h)" while start_page <= total_pages: self.url_issues = Config.url + "/search/?limit="+str(issues_per_query) self.url_issues += "&page=" + str(start_page) + "&q=" # A time range with all the tickets self.url_issues += urllib.quote("mod_date_dt:["+time_window+"]") # Order by mod_date_dt desc self.url_issues += "&sort=mod_date_dt+asc" printdbg("URL for next issues " + self.url_issues) f = urllib.urlopen(self.url_issues) ticketList = json.loads(f.read()) bugs=[] for ticket in ticketList["tickets"]: bugs.append(ticket["ticket_num"]) for bug in bugs: try: issue_url = Config.url+"/"+str(bug) issue_data = self.analyze_bug(issue_url) if issue_data is None: continue bugsdb.insert_issue(issue_data, dbtrk.id) remaining -= 1 print "Remaining time: ", (remaining)*Config.delay/60, "m" time.sleep(self.delay) except Exception, e: printerr("Error in function analyze_bug " + issue_url) traceback.print_exc(file=sys.stdout) except UnicodeEncodeError: printerr("UnicodeEncodeError: the issue %s couldn't be stored" % (issue_data.issue))
while start_issue < total_issues: self.url_issues = Config.url + "/issues/full?max-results=" + str(issues_per_query) self.url_issues += "&start-index=" + str(start_issue) printdbg("URL for next issues " + self.url_issues) d = feedparser.parse(self.url_issues) for entry in d['entries']: try: issue = self.analyze_bug(entry) if issue is None: continue bugsdb.insert_issue(issue, dbtrk.id) remaining -= 1 print "Remaining time: ", (remaining) * Config.delay / 60, "m", " issues ", str(remaining) time.sleep(Config.delay) except Exception, e: printerr("Error in function analyze_bug ") pprint.pprint(entry) traceback.print_exc(file=sys.stdout) except UnicodeEncodeError: printerr("UnicodeEncodeError: the issue %s couldn't be stored" % (issue.issue)) start_issue += issues_per_query printout("Done. %s bugs analyzed" % (total_issues - remaining)) Backend.register_backend('googlecode', GoogleCode)
class GithubBackend(Backend): def __init__(self): self.url = Config.url self.delay = Config.delay try: self.backend_password = Config.backend_password self.backend_user = Config.backend_user except AttributeError: printerr("\n--backend-user and --backend-password are mandatory \ to download bugs from Github\n") sys.exit(1) self.remaining_ratelimit = 0 def get_domain(self, url): strings = url.split('/') return strings[0] + "//" + strings[2] + "/" def analyze_bug(self, bug): #Retrieving main bug information printdbg(bug['url'] + " " + bug['state'] + " updated_at " + bug['updated_at'] + ' (ratelimit = ' + str(self.remaining_ratelimit) + ")") issue = bug['id'] if bug['labels']: bug_type = bug['labels'][0]['name'] # FIXME else: bug_type = unicode('') summary = bug['title'] desc = bug['body'] submitted_by = People(bug['user']['login']) ## FIXME send petition to bug['user']['url'] submitted_on = self.__to_datetime(bug['created_at']) if bug['assignee']: assignee = People(bug['assignee']['login']) ## assignee.set_name(bug.assignee.display_name) ## FIXME get name from bug['assignee']['url'] else: assignee = People(unicode("nobody")) issue = GithubIssue(issue, bug_type, summary, desc, submitted_by, submitted_on) issue.set_assigned(assignee) issue.set_status(bug['state']) issue.set_description(bug['body']) issue.set_web_link(bug['html_url']) try: if bug['closed_at']: issue.set_closed_at(self.__to_datetime(bug['closed_at'])) except AttributeError: pass # updated_at offers ISO 8601 format: YYYY-MM-DDTHH:MM:SSZ # MySQL doesn't support timezone, we remove it issue.set_updated_at(self.__to_datetime(bug['updated_at'])) if bug['milestone']: issue.set_milestone_name(bug['milestone']['id']) issue.set_milestone_summary(bug['milestone']['description']) issue.set_milestone_title(bug['milestone']['title']) issue.set_milestone_web_link(bug['milestone']['url']) comments = self.__get_batch_comments(bug['number']) for c in comments: by = People(c['user']['login']) ## by.setname() FIXME - to be 
done date = self.__to_datetime(c['created_at']) com = Comment(c['body'], by, date) issue.add_comment(com) # activity entries = self.__get_batch_activities(bug['number']) for e in entries: field = e['event'] added = e['commit_id'] removed = unicode('') if e['actor']: by = People(e['actor']['login']) else: by = People(u"nobody") ## by.setname() FIXME - to be done date = self.__to_datetime(e['created_at']) change = Change(field, removed, added, by, date) issue.add_change(change) return issue def __to_datetime(self, str): # converts str time to datetime # MySQL doesn't support timezone, we remove it return parse(str[:-1]) def __get_project_from_url(self): project_name = None url = self.url if url[-1] == '/': url = url[:-1] aux2 = url.rfind('/issues') aux1 = len('https://api.github.com/repos/') project_name = url[aux1:aux2] return project_name def __get_tracker_url_from_bug(self, bug): return bug['url'][:bug['url'].rfind('/')] def __get_batch_activities(self, bug_number): url = self.url + "/" + str(bug_number) + "/events" base64string = base64.encodestring( '%s:%s' % (self.backend_user, self.backend_password)).replace( '\n', '') request = urllib2.Request(url) request.add_header("Authorization", "Basic %s" % base64string) result = urllib2.urlopen(request) content = result.read() events = json.loads(content) return events def __get_batch_comments(self, bug_number): url = self.url + "/" + str(bug_number) + "/comments" base64string = base64.encodestring( '%s:%s' % (self.backend_user, self.backend_password)).replace( '\n', '') request = urllib2.Request(url) request.add_header("Authorization", "Basic %s" % base64string) result = urllib2.urlopen(request) content = result.read() comments = json.loads(content) return comments def __get_batch_bugs_state(self, state=OPEN_STATE, since=None): if state == OPEN_STATE: url = self.url + "?state=open&page=" + str(self.pagecont) \ + "&per_page=100&sort=updated&direction=asc" else: url = self.url + "?state=closed&page=" + 
str(self.pagecont) \ + "&per_page=100&sort=updated&direction=asc" # we need to download both closed and open bugs, #by default state = open if since: url = url + "&since=" + str(since) base64string = base64.encodestring( '%s:%s' % (self.backend_user, self.backend_password)).replace( '\n', '') request = urllib2.Request(url) request.add_header("Authorization", "Basic %s" % base64string) result = urllib2.urlopen(request) content = result.read() self.remaining_ratelimit = result.info()['x-ratelimit-remaining'] bugs = json.loads(content) return bugs def __get_batch_bugs(self): if self.bugs_state == OPEN_STATE: bugs = self.__get_batch_bugs_state(state=OPEN_STATE, since=self.mod_date_open) if len(bugs) == 0: self.bugs_state = CLOSED_STATE self.pagecont = 1 if self.bugs_state == CLOSED_STATE: bugs = self.__get_batch_bugs_state(state=CLOSED_STATE, since=self.mod_date_closed) return bugs def run(self): print("Running Bicho with delay of %s seconds" % (str(self.delay))) bugsdb = get_database(DBGithubBackend()) url = self.url pname = None pname = self.__get_project_from_url() printdbg(url) self.bugs_state = "open" self.pagecont = 1 self.mod_date_open = None self.mod_date_closed = None ## FIXME tracker must be also checked!!! aux_date_open = bugsdb.get_last_modification_date(state="open") if aux_date_open: self.mod_date_open = aux_date_open.isoformat() aux_date_closed = bugsdb.get_last_modification_date(state="closed") if aux_date_closed: self.mod_date_closed = aux_date_closed.isoformat() printdbg("Last open bug already cached: %s" % self.mod_date_open) printdbg("Last closed bug already cached: %s" % self.mod_date_closed) bugs = self.__get_batch_bugs() nbugs = len(bugs) # still useless bugsdb.insert_supported_traker("github", "v3") trk = Tracker(url, "github", "v3") dbtrk = bugsdb.insert_tracker(trk) # if len(bugs) == 0: if aux_date_open or aux_date_closed: printout("Bicho database up to date") else: printout("No bugs found. 
Did you provide the correct url?") sys.exit(0) auxcont = 0 while len(bugs) > 0: for bug in bugs: try: issue_data = self.analyze_bug(bug) except Exception: #FIXME it does not handle the e printerr("Error in function analyzeBug with URL: ' \ '%s and Bug: %s" % (url, bug)) raise try: # we can have meta-trackers but we want to have the # original tracker name tr_url = self.__get_tracker_url_from_bug(bug) if (tr_url != url): aux_trk = Tracker(tr_url, "github", "v3") dbtrk = bugsdb.insert_tracker(aux_trk) bugsdb.insert_issue(issue_data, dbtrk.id) except UnicodeEncodeError: printerr( "UnicodeEncodeError: the issue %s couldn't be stored" % (issue_data.issue)) except Exception, e: printerr("ERROR: ") print e time.sleep(self.delay) self.pagecont += 1 bugs = self.__get_batch_bugs() nbugs = nbugs + len(bugs) #end while printout("Done. %s bugs analyzed" % (nbugs))