def insert_tracker(self, url):
    """Register the Maniphest tracker at *url* and return its DB record.

    Also registers the 'maniphest' backend as a supported tracker type
    (project API spells it `insert_supported_traker`).
    """
    self.db.insert_supported_traker('maniphest', None)
    return self.db.insert_tracker(Tracker(url, 'maniphest', None))
def run(self):
    """Fetch one Jira issue and store it in the Bicho database.

    Expects ``self.url`` to name a concrete issue,
    ``<server>/browse/<PROJECT>-<NUMBER>``; the issue is downloaded as
    XML through Jira's issue-views servlet and parsed with SAX.
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))
    # Authenticate against the remote Jira before fetching anything.
    self.conn.login(self.url, self.backend_user, self.backend_password)
    bugsdb = get_database(DBJiraBackend())
    # Register backend type and tracker row (sic: 'traker' is the
    # project-wide spelling of this DB API).
    bugsdb.insert_supported_traker("jira", "4.1.2")
    trk = Tracker(self.url.split("-")[0], "jira", "4.1.2")
    dbtrk = bugsdb.insert_tracker(trk)
    serverUrl = self.url.split("/browse/")[0]
    # Jira's XML export endpoint for a single issue.
    query = "/si/jira.issueviews:issue-xml/"
    project = self.url.split("/browse/")[1]
    if (project.split("-").__len__() > 1):
        # URL points at a concrete issue key (e.g. PROJ-123),
        # not at a whole project.
        bug_key = project
        project = project.split("-")[0]
        bugs_number = 1
        printdbg(serverUrl + query + bug_key + "/" + bug_key + ".xml")
        parser = xml.sax.make_parser()
        handler = BugsHandler()
        parser.setContentHandler(handler)
        try:
            # Parse the XML straight from the URL; getIssues() returns a
            # list, of which only the first element is stored.
            parser.parse(serverUrl + query + bug_key + "/" + bug_key + ".xml")
            issue = handler.getIssues(self.conn)[0]
            bugsdb.insert_issue(issue, dbtrk.id)
        except Exception, e:
            # NOTE(review): broad catch; errors are only printed, the
            # issue is silently skipped.
            #printerr(e)
            print(e)
def run(self):
    """Drive the StoryBoard backend: set up the DB, then analyze
    users, tasks and story/task events in order."""
    self.debug = False
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s %(message)s')
    logging.info("Running StoryBoard bicho backend")
    # 500 is the server-side maximum page size
    # (https://storyboard.openstack.org/api/v1/); shrink it when debugging.
    self.items_per_query = 10 if self.debug else 500
    self.bugsdb = get_database(DBStoryBoardBackend())
    self.bugsdb.insert_supported_traker("storyboard", "beta")
    self.dbtrk = self.bugsdb.insert_tracker(
        Tracker(Config.url, "storyboard", "beta"))
    self.last_mod_date = self.bugsdb.get_last_modification_date()
    if self.last_mod_date:
        logging.info("Last bugs analyzed were modified on: %s"
                     % self.last_mod_date)
    # Order matters: events/checks rely on users and tasks being loaded.
    self.analyze_users()
    self.analyze_tasks()
    self.analyze_stories_events()
    self.check_tasks_events()
def run(self):
    """Fetch all Gerrit reviews (newest first) and store them in the DB.

    Pages through `getReviews()` using Gerrit's resume key until a page
    comes back short, stopping early at reviews older than the last
    stored modification date.
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))
    bugs = []
    bugsdb = get_database(DBGerritBackend())
    # still useless in gerrit
    bugsdb.insert_supported_traker("gerrit", "beta")
    trk = Tracker(Config.url + "_" + Config.gerrit_project, "gerrit", "beta")
    dbtrk = bugsdb.insert_tracker(trk)
    last_mod_time = 0
    last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)
    if last_mod_date:
        printdbg("Last reviews analyzed were modified on date: %s"
                 % last_mod_date)
        # Convert to epoch seconds so it can be compared with
        # entry['lastUpdated'] below.
        last_mod_time = time.mktime(time.strptime(
            last_mod_date, '%Y-%m-%d %H:%M:%S'))
    limit = 500  # gerrit default 500
    last_item = ""
    # last_item = "001f672c00002f80";
    number_results = limit
    total_reviews = 0
    # Keep paging while a full page comes back.
    while (number_results == limit or
           number_results == limit + 1):  # wikimedia gerrit returns limit+1
        # ordered by lastUpdated
        tickets = self.getReviews(limit, last_item)
        number_results = 0
        reviews = []
        for entry in tickets:
            if 'project' in entry.keys():
                if (entry['lastUpdated'] < last_mod_time):
                    # Reviews are ordered newest-first: everything from
                    # here on is already in the database.
                    break
                reviews.append(entry["number"])
                review_data = self.analyze_review(entry)
                if review_data is None:
                    # Malformed review: log and skip.
                    pprint.pprint("ERROR in review. Ignoring it.")
                    continue
                # Resume key for the next getReviews() page.
                last_item = entry['sortKey']
                # extra changes not included in gerrit changes
                # self.add_merged_abandoned_changes_from_comments(entry, review_data)
                self.add_merged_abandoned_changes(entry, review_data)
                self.add_uploaded_patchset_from_comments(entry, review_data)
                self.add_new_change(review_data)
                bugsdb.insert_issue(review_data, dbtrk.id)
                number_results += 1
            elif 'rowCount' in entry.keys():
                # Trailing stats record emitted by Gerrit; just show it.
                pprint.pprint(entry)
        printdbg("CONTINUE FROM: " + last_item)
        total_reviews = total_reviews + int(number_results)
    self.check_merged_abandoned_changes(bugsdb.store, dbtrk.id)
    print("Done. Number of reviews: " + str(total_reviews))
def insert_tracker(self, url, group=None):
    """Register the ReviewBoard tracker at *url* and return its DB record.

    When *group* is given, the tracker URL is scoped to that review
    group (``<url>/groups/<group>``).
    """
    self.db.insert_supported_traker('reviewboard', None)
    target = url + '/groups/' + group if group else url
    return self.db.insert_tracker(Tracker(target, 'reviewboard', None))
def setUpBackend():
    """Prepare the Allura test fixture: config, database, tracker rows,
    backend instance and the on-disk test-data directory."""
    name = 'allura'
    # Test configuration: fixed upstream URL, short delay, verbose output.
    Config.url = "http://sourceforge.net/rest/p/allura/tickets"
    Config.delay = 1
    Config.debug = True
    AlluraTest.setUpDB()
    AlluraTest.issuesDB = get_database(DBAlluraBackend())
    AlluraTest.issuesDB.insert_supported_traker(name, "beta")
    AlluraTest.tracker = Tracker(Config.url, name, "beta")
    AlluraTest.dbtracker = AlluraTest.issuesDB.insert_tracker(
        AlluraTest.tracker)
    AlluraTest.tests_data_dir = os.path.join('./data/',
                                             AlluraTest.tracker.name)
    AlluraTest.backend = Backend.create_backend(name)
    if not os.path.isdir(AlluraTest.tests_data_dir):
        os.makedirs(AlluraTest.tests_data_dir)
def run(self):
    """Fetch all bugs of one Launchpad project and store them in the DB.

    Logs into Launchpad (caching credentials under ~/.cache/bicho/),
    searches tasks in every known status ordered by last update, and
    inserts each analyzed bug, resuming incrementally from the last
    stored modification date.
    """
    print("Running Bicho with delay of %s seconds" % (str(self.delay)))
    url = self.url
    pname = None
    pname = self.__get_project_from_url()
    bugsdb = get_database(DBLaunchpadBackend())
    printdbg(url)
    # launchpad needs a temp directory to store cached data
    homedir = pwd.getpwuid(os.getuid()).pw_dir
    cachedir = os.path.join(homedir, ".cache/bicho/")
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    # NOTE(review): os.path.join with a single pre-concatenated argument;
    # works, but relies on cachedir's trailing slash.
    cre_file = os.path.join(cachedir + 'launchpad-credential')
    self.lp = Launchpad.login_with('Bicho', 'production',
                                   credentials_file=cre_file)
    # All Launchpad task statuses: searchTasks() would otherwise only
    # return open bugs.
    aux_status = [
        "New", "Incomplete", "Opinion", "Invalid", "Won't Fix",
        "Expired", "Confirmed", "Triaged", "In Progress",
        "Fix Committed", "Fix Released",
        "Incomplete (with response)", "Incomplete (without response)"
    ]
    # still useless
    bugsdb.insert_supported_traker("launchpad", "x.x")
    trk = Tracker(url, "launchpad", "x.x")
    dbtrk = bugsdb.insert_tracker(trk)
    last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)
    if last_mod_date:
        # Incremental run: only tasks modified since the last stored bug.
        bugs = self.lp.projects[pname].searchTasks(
            status=aux_status,
            omit_duplicates=False,
            order_by='date_last_updated',
            modified_since=last_mod_date)
    else:
        bugs = self.lp.projects[pname].searchTasks(
            status=aux_status,
            omit_duplicates=False,
            order_by='date_last_updated')
    printdbg("Last bug already cached: %s" % last_mod_date)
    nbugs = len(bugs)
    if nbugs == 0:
        printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)
    analyzed = []
    for bug in bugs:
        if bug.web_link in analyzed:
            continue  # for the bizarre error #338
        try:
            issue_data = self.analyze_bug(bug)
        except Exception:
            #FIXME it does not handle the e
            printerr("Error in function analyzeBug with URL: ' \
'%s and Bug: %s" % (url, bug))
            raise
        try:
            # we can have meta-trackers but we want to have the original
            #tracker name
            tr_url = self.__get_tracker_url_from_bug(bug)
            if (tr_url != url):
                aux_trk = Tracker(tr_url, "launchpad", "x.x")
                dbtrk = bugsdb.insert_tracker(aux_trk)
            bugsdb.insert_issue(issue_data, dbtrk.id)
        except UnicodeEncodeError:
            printerr(
                "UnicodeEncodeError: the issue %s couldn't be stored"
                % (issue_data.issue))
        except NotFoundError:
            printerr("NotFoundError: the issue %s couldn't be stored"
                     % (issue_data.issue))
        except Exception, e:
            printerr("Unexpected Error: the issue %s couldn't be stored"
                     % (issue_data.issue))
            print e
        analyzed.append(bug.web_link)  # for the bizarre error #338
        time.sleep(self.delay)
def run(self):
    """Fetch all Redmine issues page by page and store them in the DB.

    Resumes incrementally from the last stored modification date and
    stops when a page comes back empty or repeats the first ticket.
    """
    printout("Running Bicho with delay of %s seconds" % (str(self.delay)))
    # redmine 1.0 support
    last_page = 1
    tickets_page = 25  # fixed redmine
    bugs = []
    bugsdb = get_database(DBRedmineBackend())
    # still useless in redmine
    bugsdb.insert_supported_traker("redmine", "beta")
    trk = Tracker(Config.url, "redmine", "beta")
    dbtrk = bugsdb.insert_tracker(trk)
    updated_on = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)
    self.url_issues = self._get_issues_url(updated_on)
    url = self.url_issues + "&page=" + str(last_page)
    request = urllib2.Request(url)
    if self.backend_user:
        # HTTP Basic auth header, built by hand for urllib2.
        # NOTE(review): tests self.backend_user but reads
        # Config.backend_user/password — presumably the same values;
        # verify against the backend's __init__.
        base64string = base64.encodestring(
            '%s:%s' % (Config.backend_user,
                       Config.backend_password)).replace('\n', '')
        request.add_header("Authorization", "Basic %s" % base64string)
    # Get statuses
    self._get_statuses()
    f = urllib2.urlopen(request)
    tickets = json.loads(f.read())
    if not tickets["issues"]:
        printout("Done. No new bugs to analyze")
        return
    # First page: analyze and store every ticket.
    for ticket in tickets["issues"]:
        issue = self.analyze_bug(ticket)
        bugsdb.insert_issue(issue, dbtrk.id)
        time.sleep(self.delay)
    # Remember the first ticket id to detect when the server starts
    # repeating pages.
    last_ticket = tickets["issues"][0]['id']
    while True:
        last_page += 1
        url = self.url_issues + "&page=" + str(last_page)
        request = urllib2.Request(url)
        #base64string = base64.encodestring('%s:%s' % (Config.backend_user, Config.backend_password)).replace('\n', '')
        #request.add_header("Authorization", "Basic %s" % base64string)
        f = urllib2.urlopen(request)
        tickets = json.loads(f.read())
        if len(tickets['issues']) == 0:
            break
        pprint.pprint("Tickets read: " + str(tickets["issues"][0]['id']) +
                      " " + str(tickets["issues"][-1]['id']))
        if tickets["issues"][0]['id'] == last_ticket:
            # Page repeats the first one: we have wrapped around.
            break
        for ticket in tickets["issues"]:
            issue = self.analyze_bug(ticket)
            bugsdb.insert_issue(issue, dbtrk.id)
            time.sleep(self.delay)
    pprint.pprint("Total pages: " + str(last_page))
    printout("Done. Bugs analyzed:" + str(last_page * tickets_page))
def run(self):
    """Fetch all GitHub issues in batches and store them in the DB.

    Resumes incrementally from the last stored modification date and
    exits cleanly whenever the GitHub API rate limit is hit.
    """
    print("Running Bicho with delay of %s seconds" % (str(self.delay)))
    bugsdb = get_database(DBGithubBackend())
    url = self.url
    pname = None
    pname = self.__get_project_from_url()
    printdbg(url)
    bugsdb.insert_supported_traker("github", "v3")
    trk = Tracker(url, "github", "v3")
    dbtrk = bugsdb.insert_tracker(trk)
    self.bugs_state = ALL_STATES
    self.pagecont = 1
    self.mod_date = None
    aux_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)
    if aux_date:
        self.mod_date = aux_date.isoformat()
        printdbg("Last issue already cached: %s" % self.mod_date)
    try:
        bugs = self.__get_batch_bugs()
    except GitHubRateLimitReached:
        printout(
            "GitHub rate limit reached. To resume, wait some minutes.")
        sys.exit(0)
    nbugs = len(bugs)
    if len(bugs) == 0:
        if aux_date:
            # Empty incremental fetch means nothing changed upstream.
            printout("Bicho database up to date")
        else:
            printout("No bugs found. Did you provide the correct url?")
        sys.exit(0)
    auxcont = 0
    while len(bugs) > 0:
        for bug in bugs:
            try:
                issue_data = self.analyze_bug(bug)
            except GitHubRateLimitReached:
                printout(
                    "GitHub rate limit reached. To resume, wait some minutes."
                )
                sys.exit(0)
            except Exception:
                #FIXME it does not handle the e
                msg = "Error in function analyzeBug with URL: %s and bug: %s" % (
                    url, bug)
                printerr(msg)
                raise
            try:
                # we can have meta-trackers but we want to have the
                # original tracker name
                tr_url = self.__get_tracker_url_from_bug(bug)
                if (tr_url != url):
                    aux_trk = Tracker(tr_url, "github", "v3")
                    dbtrk = bugsdb.insert_tracker(aux_trk)
                bugsdb.insert_issue(issue_data, dbtrk.id)
            except UnicodeEncodeError:
                printerr(
                    "UnicodeEncodeError: the issue %s couldn't be stored"
                    % (issue_data.issue))
            except Exception, e:
                printerr("ERROR: ")
                print e
            printdbg("Getting ticket number " + str(bug["number"]))
            time.sleep(self.delay)
        # Next page of the issue listing.
        self.pagecont += 1
        try:
            bugs = self.__get_batch_bugs()
        except GitHubRateLimitReached:
            printout(
                "GitHub rate limit reached. To resume, wait some minutes.")
            sys.exit(0)
        nbugs = nbugs + len(bugs)
def run(self): """ """ printout("Running Bicho with delay of %s seconds" % (str(self.delay))) # limit=-1 is NOT recognized as 'all'. 500 is a reasonable limit. - allura code issues_per_query = 500 start_page = 0 bugs = [] bugsdb = get_database(DBAlluraBackend()) # still useless in allura bugsdb.insert_supported_traker("allura", "beta") trk = Tracker(Config.url, "allura", "beta") dbtrk = bugsdb.insert_tracker(trk) last_mod_date = bugsdb.get_last_modification_date() # Date before the first ticket time_window_start = "1900-01-01T00:00:00Z" time_window_end = datetime.now().isoformat() + "Z" if last_mod_date: time_window_start = last_mod_date printdbg("Last bugs analyzed were modified on: %s" % last_mod_date) time_window = time_window_start + " TO " + time_window_end self.url_issues = Config.url + "/search/?limit=1" self.url_issues += "&q=" # A time range with all the tickets self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]") printdbg("URL for getting metadata " + self.url_issues) f = urllib.urlopen(self.url_issues) ticketTotal = json.loads(f.read()) total_issues = int(ticketTotal['count']) total_pages = total_issues / issues_per_query print("Number of tickets: " + str(total_issues)) if total_issues == 0: printout("No bugs found. 
Did you provide the correct url?") sys.exit(0) remaining = total_issues print "ETA ", (total_issues * Config.delay) / (60), "m (", ( total_issues * Config.delay) / (60 * 60), "h)" while start_page <= total_pages: self.url_issues = Config.url + "/search/?limit=" + str( issues_per_query) self.url_issues += "&page=" + str(start_page) + "&q=" # A time range with all the tickets self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]") # Order by mod_date_dt desc self.url_issues += "&sort=mod_date_dt+asc" printdbg("URL for next issues " + self.url_issues) f = urllib.urlopen(self.url_issues) ticketList = json.loads(f.read()) bugs = [] for ticket in ticketList["tickets"]: bugs.append(ticket["ticket_num"]) for bug in bugs: try: issue_url = Config.url + "/" + str(bug) issue_data = self.analyze_bug(issue_url) if issue_data is None: continue bugsdb.insert_issue(issue_data, dbtrk.id) remaining -= 1 print "Remaining time: ", ( remaining) * Config.delay / 60, "m" time.sleep(self.delay) except Exception, e: printerr("Error in function analyze_bug " + issue_url) traceback.print_exc(file=sys.stdout) except UnicodeEncodeError: printerr( "UnicodeEncodeError: the issue %s couldn't be stored" % (issue_data.issue))
def run(self): """ """ printout("Running Bicho with delay of %s seconds" % (str(self.delay))) issues_per_query = 250 start_issue = 1 bugs = [] bugsdb = get_database(DBGoogleCodeBackend()) # still useless bugsdb.insert_supported_traker("googlecode", "beta") trk = Tracker(Config.url, "googlecode", "beta") dbtrk = bugsdb.insert_tracker(trk) self.url = Config.url # https://code.google.com/feeds/issues/p/mobile-time-care self.url_issues = Config.url + "/issues/full?max-results=1" printdbg("URL for getting metadata " + self.url_issues) d = feedparser.parse(self.url_issues) total_issues = int(d['feed']['opensearch_totalresults']) print "Total bugs: ", total_issues if total_issues == 0: printout("No bugs found. Did you provide the correct url?") sys.exit(0) remaining = total_issues print "ETA ", (total_issues * Config.delay) / (60), "m (", ( total_issues * Config.delay) / (60 * 60), "h)" while start_issue < total_issues: self.url_issues = Config.url + "/issues/full?max-results=" + str( issues_per_query) self.url_issues += "&start-index=" + str(start_issue) printdbg("URL for next issues " + self.url_issues) d = feedparser.parse(self.url_issues) for entry in d['entries']: try: issue = self.analyze_bug(entry) if issue is None: continue bugsdb.insert_issue(issue, dbtrk.id) remaining -= 1 print "Remaining time: ", ( remaining) * Config.delay / 60, "m", " issues ", str( remaining) time.sleep(Config.delay) except Exception, e: printerr("Error in function analyze_bug ") pprint.pprint(entry) traceback.print_exc(file=sys.stdout) except UnicodeEncodeError: printerr( "UnicodeEncodeError: the issue %s couldn't be stored" % (issue.issue))
def __insert_tracker(self, url):
    """Register the SourceForge tracker at *url* and cache its DB id
    on the instance."""
    name, version = SUPPORTED_SF_TRACKERS[0], SUPPORTED_SF_TRACKERS[1]
    db_trk = self.db.insert_tracker(Tracker(url, name, version))
    self.tracker_id = db_trk.id
def run(self):
    """Fetch Launchpad bugs for a project, or for every sub-project of
    a meta project, and store them in the DB.

    Each (sub-)project gets its own tracker row keyed by its
    ``bugs.``-prefixed web link; bug fetching resumes from the last
    stored modification date per tracker.
    """
    print("Running Bicho with delay of %s seconds" % (str(self.delay)))
    url = self.url
    pname = None
    pname = self.__get_project_from_url()
    bugsdb = get_database(DBLaunchpadBackend())
    printdbg(url)
    # launchpad needs a temp directory to store cached data
    homedir = pwd.getpwuid(os.getuid()).pw_dir
    cachedir = os.path.join(homedir, ".cache/bicho/")
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    cre_file = os.path.join(cachedir + 'launchpad-credential')
    self.lp = Launchpad.login_with('Bicho', 'production',
                                   credentials_file=cre_file)
    # All Launchpad task statuses: searchTasks() would otherwise only
    # return open bugs.
    aux_status = ["New", "Incomplete", "Opinion", "Invalid", "Won't Fix",
                  "Expired", "Confirmed", "Triaged", "In Progress",
                  "Fix Committed", "Fix Released",
                  "Incomplete (with response)",
                  "Incomplete (without response)"]
    # Check whether the project is a meta project
    lp_project = self.lp.projects[pname]
    if hasattr(lp_project, 'projects'):
        projects = [p for p in lp_project.projects]
    else:
        projects = [lp_project]
    printdbg("%s projects to analyze" % len(projects))
    # Still useless - insert meta project
    bugsdb.insert_supported_traker("launchpad", "x.x")
    trk = Tracker(url, "launchpad", "x.x")
    dbtrk = bugsdb.insert_tracker(trk)
    for p in projects:
        # One tracker row per sub-project, keyed by its bug-tracker URL.
        trk_url = p.web_link.replace('://', '://bugs.')
        trk = Tracker(trk_url, "launchpad", "x.x")
        dbtrk = bugsdb.insert_tracker(trk)
        last_mod_date = bugsdb.get_last_modification_date(
            tracker_id=dbtrk.id)
        if last_mod_date:
            # Incremental run for this sub-project.
            bugs = p.searchTasks(status=aux_status,
                                 omit_duplicates=False,
                                 order_by='date_last_updated',
                                 modified_since=last_mod_date)
        else:
            bugs = p.searchTasks(status=aux_status,
                                 omit_duplicates=False,
                                 order_by='date_last_updated')
        printdbg("Last bug already cached: %s" % last_mod_date)
        nbugs = len(bugs)
        if nbugs == 0:
            printout("No bugs found on %s" % p.name)
            continue
        else:
            printout("%s bugs found on %s" % (nbugs, p.name))
        self.analyze_project_bugs(bugs, dbtrk, bugsdb)
def _set_tracker(self):
    """Register the Bugzilla tracker in the database and cache the
    resulting DB record on the instance."""
    # FIXME: supported trackers have to be inserted during
    # the initialization
    self.bugsdb.insert_supported_traker(BUGZILLA, self.version)
    self.tracker = self.bugsdb.insert_tracker(
        Tracker(self.url, BUGZILLA, self.version))