Ejemplo n.º 1
0
    def run(self):
        """Fetch a single JIRA issue through the issue-xml view and store it.

        Expects self.url of the form <server>/browse/<PROJECT-NNN>; only the
        single-issue branch is handled here.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        bugsdb = get_database(DBJiraBackend())

        bugsdb.insert_supported_traker("jira", "4.1.2")
        trk = Tracker(self.url.split("-")[0], "jira", "4.1.2")
        dbtrk = bugsdb.insert_tracker(trk)

        serverUrl = self.url.split("/browse/")[0]
        query = "/si/jira.issueviews:issue-xml/"
        project = self.url.split("/browse/")[1]

        # A path like "PROJECT-123" means a single issue key was given.
        if len(project.split("-")) > 1:
            bug_key = project
            project = project.split("-")[0]

            printdbg(serverUrl + query + bug_key + "/" + bug_key + ".xml")

            parser = xml.sax.make_parser()
            handler = BugsHandler()
            parser.setContentHandler(handler)
            try:
                parser.parse(serverUrl + query + bug_key + "/" + bug_key + ".xml")
                issue = handler.getIssues()[0]
                bugsdb.insert_issue(issue, dbtrk.id)
            except Exception as e:  # modern except syntax (Python 2.6+)
                print(e)
Ejemplo n.º 2
0
    def run(self):
        """Run the StoryBoard backend: configure logging, open the database
        and analyze users, tasks and story events."""
        self.debug = False
        logging.basicConfig(level=logging.INFO,
                            format='%(asctime)s %(message)s')

        logging.info("Running StoryBoard bicho backend")

        # Default page size: max limit in https://storyboard.openstack.org/api/v1/
        self.items_per_query = 500
        if self.debug:
            # debug
            self.items_per_query = 10

        self.bugsdb = get_database(DBStoryBoardBackend())

        self.bugsdb.insert_supported_traker("storyboard", "beta")
        trk = Tracker(Config.url, "storyboard", "beta")
        self.dbtrk = self.bugsdb.insert_tracker(trk)

        self.last_mod_date = self.bugsdb.get_last_modification_date()
        if self.last_mod_date:
            logging.info("Last bugs analyzed were modified on: %s" %
                         self.last_mod_date)

        self.analyze_users()
        self.analyze_tasks()
        self.analyze_stories_events()
        self.check_tasks_events()
Ejemplo n.º 3
0
    def run(self):
        """Fetch a single JIRA issue through the JIRA REST client and store it.

        Expects self.url of the form <server>/browse/<PROJECT-NNN>.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        bugsdb = get_database(DBJiraBackend())

        bugsdb.insert_supported_traker("jira", "4.1.2")
        trk = Tracker(self.url.split("-")[0], "jira", "4.1.2")
        dbtrk = bugsdb.insert_tracker(trk)

        serverUrl = self.url.split("/browse/")[0]
        query = "/si/jira.issueviews:issue-xml/"
        project = self.url.split("/browse/")[1]

        # NOTE(review): the locally computed serverUrl above is not used here;
        # self.serverUrl is read instead — confirm both hold the same value.
        options_jira = {
            'server': self.serverUrl
        }

        jira = JIRA(options_jira)

        # A path like "PROJECT-123" means a single issue key was given.
        if len(project.split("-")) > 1:
            bug_key = project
            project = project.split("-")[0]
            bugs_number = self.bugsNumber(jira)

            try:
                issue = jira.issue(bug_key, expand='changelog')
                self.analyze_bug_list(issue, self.serverUrl + '/browse/', bugsdb, dbtrk.id)
            except Exception as e:  # modern except syntax (Python 2.6+)
                print(e)
Ejemplo n.º 4
0
    def __init__(self):
        """Read config values, open the database and create the Conduit client.

        Exits with status 1 when --backend-token is missing, since Maniphest
        downloads require it.
        """
        self.url = Config.url
        self.delay = Config.delay
        self.max_issues = Config.nissues
        self.no_resume = Config.no_resume

        # The resume options are mutually exclusive; the date format was
        # already validated by the config class.
        self.start_from = None
        self.from_id = None
        if Config.start_from:
            from dateutil import parser
            self.start_from = parser.parse(Config.start_from)
        elif Config.from_id:
            self.from_id = Config.from_id

        self.db = get_database(DBManiphestBackend())

        self.identities = {}
        self.projects = {}

        try:
            self.backend_token = Config.backend_token
            self.conduit = Conduit(self.url, self.backend_token)
        except AttributeError:
            printerr("Error: --backend-token is mandatory to download issues from Maniphest\n")
            sys.exit(1)
Ejemplo n.º 5
0
    def run(self):
        """Log in, fetch a single JIRA issue via the issue-xml view and store it.

        Expects self.url of the form <server>/browse/<PROJECT-NNN>.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        self.conn.login(self.url, self.backend_user, self.backend_password)

        bugsdb = get_database(DBJiraBackend())

        bugsdb.insert_supported_traker("jira", "4.1.2")
        trk = Tracker(self.url.split("-")[0], "jira", "4.1.2")
        dbtrk = bugsdb.insert_tracker(trk)

        serverUrl = self.url.split("/browse/")[0]
        query = "/si/jira.issueviews:issue-xml/"
        project = self.url.split("/browse/")[1]

        # A path like "PROJECT-123" means a single issue key was given.
        if len(project.split("-")) > 1:
            bug_key = project
            project = project.split("-")[0]

            printdbg(serverUrl + query + bug_key + "/" + bug_key + ".xml")

            parser = xml.sax.make_parser()
            handler = BugsHandler()
            parser.setContentHandler(handler)
            try:
                parser.parse(serverUrl + query + bug_key + "/" + bug_key +
                             ".xml")
                issue = handler.getIssues(self.conn)[0]
                bugsdb.insert_issue(issue, dbtrk.id)
            except Exception as e:  # modern except syntax (Python 2.6+)
                print(e)
Ejemplo n.º 6
0
    def __init__(self):
        """Initialize the Maniphest backend from the global configuration.

        Sets up resume state, the database handle and the Conduit API client;
        exits with status 1 when no backend token was configured.
        """
        self.url = Config.url
        self.delay = Config.delay
        self.max_issues = Config.nissues
        self.no_resume = Config.no_resume

        # Only one resume mode may be active; date format was checked by the
        # config class already.
        self.start_from = None
        self.from_id = None
        if Config.start_from:
            from dateutil import parser
            self.start_from = parser.parse(Config.start_from)
        elif Config.from_id:
            self.from_id = Config.from_id

        self.db = get_database(DBManiphestBackend())

        self.identities = {}
        self.projects = {}

        try:
            self.backend_token = Config.backend_token
            self.conduit = Conduit(self.url, self.backend_token)
        except AttributeError:
            printerr(
                "Error: --backend-token is mandatory to download issues from Maniphest\n"
            )
            sys.exit(1)
Ejemplo n.º 7
0
    def run(self):
        """Fetch Gerrit reviews page by page and store them in the database.

        Pages through getReviews() using the sortKey of the last processed
        entry as the resume point, stopping once a page comes back smaller
        than the limit or once already-cached (older) reviews appear.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        bugs = []
        bugsdb = get_database(DBGerritBackend())

        # still useless in gerrit
        bugsdb.insert_supported_traker("gerrit", "beta")
        trk = Tracker(Config.url + "_" + Config.gerrit_project, "gerrit", "beta")
        dbtrk = bugsdb.insert_tracker(trk)

        # Timestamp of the newest review already stored; entries older than
        # this are skipped below.
        last_mod_time = 0
        last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)
        if last_mod_date:
            printdbg("Last reviews analyzed were modified on date: %s"
                     % last_mod_date)
            last_mod_time = time.mktime(time.strptime(last_mod_date, '%Y-%m-%d %H:%M:%S'))

        limit = 500  # gerrit default 500
        last_item = ""  # resume key (sortKey of the last processed review)
        # last_item = "001f672c00002f80";
        number_results = limit
        total_reviews = 0

        # Keep paging while full pages keep coming back.
        while (number_results == limit or
               number_results == limit + 1):  # wikimedia gerrit returns limit+1
            # ordered by lastUpdated
            tickets = self.getReviews(limit, last_item)
            number_results = 0

            reviews = []
            for entry in tickets:
                if 'project' in entry.keys():
                    # Entries arrive ordered by lastUpdated, so the first one
                    # older than the cache means the rest are cached too.
                    if (entry['lastUpdated'] < last_mod_time):
                        break
                    reviews.append(entry["number"])
                    review_data = self.analyze_review(entry)

                    if review_data is None:
                        pprint.pprint("ERROR in review. Ignoring it.")
                        continue

                    # Advance the resume point only after a successful parse.
                    last_item = entry['sortKey']
                    # extra changes not included in gerrit changes
                    # self.add_merged_abandoned_changes_from_comments(entry, review_data)
                    self.add_merged_abandoned_changes(entry, review_data)
                    self.add_uploaded_patchset_from_comments(entry, review_data)
                    self.add_new_change(review_data)
                    bugsdb.insert_issue(review_data, dbtrk.id)
                    number_results += 1
                elif 'rowCount' in entry.keys():
                    # Summary row that closes a page; log the resume point.
                    pprint.pprint(entry)
                    printdbg("CONTINUE FROM: " + last_item)
            total_reviews = total_reviews + int(number_results)
        self.check_merged_abandoned_changes(bugsdb.store, dbtrk.id)

        print("Done. Number of reviews: " + str(total_reviews))
Ejemplo n.º 8
0
    def run(self):
        """
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        issues_per_query = 250
        start_issue = 1

        bugs = []
        bugsdb = get_database(DBGoogleCodeBackend())

        # still useless
        bugsdb.insert_supported_traker("googlecode", "beta")
        trk = Tracker(Config.url, "googlecode", "beta")

        dbtrk = bugsdb.insert_tracker(trk)

        self.url = Config.url

       #  https://code.google.com/feeds/issues/p/mobile-time-care
        self.url_issues = Config.url + "/issues/full?max-results=1"
        printdbg("URL for getting metadata " + self.url_issues)

        d = feedparser.parse(self.url_issues)

        total_issues = int(d['feed']['opensearch_totalresults'])
        print "Total bugs: ", total_issues
        if total_issues == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)
        remaining = total_issues

        print "ETA ", (total_issues * Config.delay) / (60), "m (", (total_issues * Config.delay) / (60 * 60), "h)"

        while start_issue < total_issues:
            self.url_issues = Config.url + "/issues/full?max-results=" + str(issues_per_query)
            self.url_issues += "&start-index=" + str(start_issue)

            printdbg("URL for next issues " + self.url_issues)

            d = feedparser.parse(self.url_issues)

            for entry in d['entries']:
                try:
                    issue = self.analyze_bug(entry)
                    if issue is None:
                        continue
                    bugsdb.insert_issue(issue, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (remaining) * Config.delay / 60, "m", " issues ", str(remaining)
                    time.sleep(Config.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug ")
                    pprint.pprint(entry)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                             % (issue.issue))
Ejemplo n.º 9
0
    def run(self):
        """Fetch Trac tickets in the [start_from, end_with] index range and
        store them in the database.

        NOTE(review): the tracker URL and config path are hard-coded below —
        presumably leftovers from development; confirm before production use.
        """
        cfg = Config()
        cfg.load_from_file("/home/user/Grimoire/Bicho/bicho/bicho.conf")

        url = 'http://trac.nginx.org/nginx/'

        tibi = TracBackend()
        issues = tibi.getIDs(url)
        bugsdb = get_database(DBTracBackend())
        bugsdb.insert_supported_traker("trac", "1.0.6post2")

        trk = Tracker(url, "trac", "1.0.6post2")
        dbtrk = bugsdb.insert_tracker(trk)

        # Default to the full range of fetched issue ids.
        self.start_from = 0 if self.start_from is None else self.start_from
        self.end_with = len(issues) if self.end_with is None else self.end_with

        for i in range(len(issues)):
            if i < self.start_from:
                continue
            elif i > self.end_with:
                break

            printdbg("We are trying issue: {}".format(issues[i]))
            try:
                printdbg("Getting the entry")

                raw_data = tibi.getIssue(url, issues[i])
                printdbg("Parsing the entry")
                issue = tibi.analyzeBug(raw_data)
                printdbg("Inserting the issue into the DB")

                # Put an issue into the database.
                bugsdb.insert_issue(issue, dbtrk.id)

            except UnicodeEncodeError as e:
                printerr(
                    "UnicodeEncodeError: the issue %s couldn't be stored"
                    % (issues[i]))
                print(e)

            except Exception:
                printerr("Error :")
                import traceback
                traceback.print_exc()
                # Fix: exit non-zero on unexpected errors so callers and
                # shell scripts can detect the failure (was sys.exit(0)).
                sys.exit(1)
Ejemplo n.º 10
0
    def __init__(self):
        """Set up the Trac backend: normalized URL, delay and RPC client."""
        # Strip a single trailing slash so later URL joins stay consistent.
        url = Config.url
        if url.endswith('/'):
            url = url[:-1]
        self.url = url

        self.delay = Config.delay
        self.identities = {}

        self.db = get_database(DBTracBackend())

        self.trac_rpc = TracRPC(self.url)
Ejemplo n.º 11
0
    def __init__(self):
        """Initialize the Trac backend from the global configuration."""
        self.url = Config.url
        # Drop one trailing slash, if present, to normalize the tracker URL.
        if self.url.endswith('/'):
            self.url = self.url[:-1]

        self.delay = Config.delay
        self.identities = {}

        self.db = get_database(DBTracBackend())

        self.trac_rpc = TracRPC(self.url)
Ejemplo n.º 12
0
    def setUpBackend():
        """Prepare the Allura test fixture: config, database, tracker and
        backend, plus the on-disk test-data directory."""
        name = 'allura'
        Config.delay = 1
        Config.debug = True
        Config.url = "http://sourceforge.net/rest/p/allura/tickets"

        AlluraTest.setUpDB()
        AlluraTest.issuesDB = get_database(DBAlluraBackend())
        AlluraTest.issuesDB.insert_supported_traker(name, "beta")
        AlluraTest.tracker = Tracker(Config.url, name, "beta")
        AlluraTest.dbtracker = AlluraTest.issuesDB.insert_tracker(AlluraTest.tracker)

        AlluraTest.tests_data_dir = os.path.join('./data/', AlluraTest.tracker.name)
        AlluraTest.backend = Backend.create_backend(name)

        # Create the fixture directory on first use.
        if not os.path.isdir(AlluraTest.tests_data_dir):
            os.makedirs(AlluraTest.tests_data_dir)
Ejemplo n.º 13
0
    def __init__(self):
        """Initialize the Bugzilla backend from the global configuration."""
        self.url = self._healthy_url(Config.url)
        self.delay = Config.delay
        self.cookies = {}
        self.version = None
        self.tracker = None
        self.retrieved = {}  # retrieved issues on this run

        # Credentials are optional; without them mail addresses are skipped.
        try:
            self.backend_user = Config.backend_user
            self.backend_password = Config.backend_password
        except AttributeError:
            printout("No Bugzilla account provided, mail addresses won't " +
                     "be retrieved")
            self.backend_user = None
            self.backend_password = None

        self.bugsdb = get_database(DBBugzillaBackend())
Ejemplo n.º 14
0
    def __init__(self):
        """Set up the Bugzilla backend (with issue cap) from configuration."""
        self.url = self._healthy_url(Config.url)
        self.delay = Config.delay
        self.max_issues = Config.nissues
        self.cookies = {}
        self.version = None
        self.tracker = None
        self.retrieved = {}  # issues already retrieved on this run

        # An account is optional; without it mail addresses are not fetched.
        try:
            self.backend_user = Config.backend_user
            self.backend_password = Config.backend_password
        except AttributeError:
            printout("No Bugzilla account provided, mail addresses won't " +
                     "be retrieved")
            self.backend_user = None
            self.backend_password = None

        self.bugsdb = get_database(DBBugzillaBackend())
Ejemplo n.º 15
0
    def run(self):
        """Collect SourceForge bug ids from the tracker URL and store every
        issue in the database, honoring the configured delay.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        ids = []
        self.parser = SourceForgeParser()

        # first we take the bugs ids
        # Fix: this read an undefined local `url`; the tracker URL is self.url.
        if self.url.find("aid=") > 0:
            aux = self.url.split("aid=")[1].split("&")[0]
            ids.append(aux)
        else:
            ids = self.__get_issues_list(self.url)

        self.__check_tracker_url(self.url)

        # order the parameters in the url to add the same tracker url
        # to data base without aid parameter
        self.__order_query(self.url)

        self.db = get_database(DBSourceForgeBackend())
        self.db.insert_supported_traker(SUPPORTED_SF_TRACKERS[0],
                                        SUPPORTED_SF_TRACKERS[1])
        self.__insert_tracker(self.url)

        nbugs = len(ids)
        if nbugs == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        for bug_id in ids:  # renamed: `id` shadows the builtin
            url = self.url + '&func=detail&aid=%s' % bug_id  # FIXME:urls!!!
            printdbg(url)
            issue = self.__get_issue(url)
            self.__insert_issue(issue)

            time.sleep(self.delay)

        printout("Done. %s bugs analyzed" % (nbugs))
Ejemplo n.º 16
0
    def run(self):
        """Gather the bug ids for a SourceForge tracker and insert each issue
        into the database, sleeping self.delay seconds between requests.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        ids = []
        self.parser = SourceForgeParser()

        # first we take the bugs ids
        # Fix: the original referenced an undefined name `url` here; the
        # tracker URL is held in self.url.
        if self.url.find("aid=") > 0:
            aux = self.url.split("aid=")[1].split("&")[0]
            ids.append(aux)
        else:
            ids = self.__get_issues_list(self.url)

        self.__check_tracker_url(self.url)

        # order the parameters in the url to add the same tracker url
        # to data base without aid parameter
        self.__order_query(self.url)

        self.db = get_database(DBSourceForgeBackend())
        self.db.insert_supported_traker(SUPPORTED_SF_TRACKERS[0],
                                        SUPPORTED_SF_TRACKERS[1])
        self.__insert_tracker(self.url)

        nbugs = len(ids)
        if nbugs == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        for bug_id in ids:  # renamed: `id` shadows the builtin
            url = self.url + '&func=detail&aid=%s' % bug_id  # FIXME:urls!!!
            printdbg(url)
            issue = self.__get_issue(url)
            self.__insert_issue(issue)

            time.sleep(self.delay)

        printout("Done. %s bugs analyzed" % (nbugs))
Ejemplo n.º 17
0
    def __init__(self):
        """Set up the ReviewBoard backend: URL parts, group and API client."""
        parts = urlparse.urlsplit(Config.url)

        self.url = Config.url
        self.base_url = parts.scheme + '://' + parts.netloc
        self.max_issues = Config.nissues

        # Extract the review group from a path like "/groups/<name>/".
        self.group = None
        path = parts.path
        if path and path.startswith('/groups/'):
            name = path.replace('/groups/', '')
            self.group = name[:-1] if name.endswith('/') else name

        self.delay = Config.delay
        self.identities = {}

        self.db = get_database(DBReviewBoardBackend())

        self.api_client = ReviewBoardAPIClient(self.base_url)
Ejemplo n.º 18
0
    def __init__(self):
        """Initialize the ReviewBoard backend from the global configuration."""
        parts = urlparse.urlsplit(Config.url)

        self.url = Config.url
        self.base_url = parts.scheme + '://' + parts.netloc
        self.group = None
        self.max_issues = Config.nissues

        # URLs shaped like "<base>/groups/<name>/" select a review group.
        if parts.path and parts.path.startswith('/groups/'):
            group_name = parts.path.replace('/groups/', '')
            if group_name.endswith('/'):
                group_name = group_name[:-1]
            self.group = group_name

        self.delay = Config.delay
        self.identities = {}

        self.db = get_database(DBReviewBoardBackend())

        self.api_client = ReviewBoardAPIClient(self.base_url)
Ejemplo n.º 19
0
    def run(self):
        """
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        # limit=-1 is NOT recognized as 'all'.  500 is a reasonable limit. - allura code
        issues_per_query = 500
        start_page = 0

        bugs = []
        bugsdb = get_database(DBAlluraBackend())

        # still useless in allura
        bugsdb.insert_supported_traker("allura", "beta")
        trk = Tracker(Config.url, "allura", "beta")
        dbtrk = bugsdb.insert_tracker(trk)

        last_mod_date = bugsdb.get_last_modification_date()

        # Date before the first ticket
        time_window_start = "1900-01-01T00:00:00Z"
        time_window_end = datetime.now().isoformat() + "Z"

        if last_mod_date:
            time_window_start = last_mod_date
            printdbg("Last bugs analyzed were modified on: %s" % last_mod_date)

        time_window = time_window_start + " TO  " + time_window_end

        self.url_issues = Config.url + "/search/?limit=1"
        self.url_issues += "&q="
        # A time range with all the tickets
        self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]")
        printdbg("URL for getting metadata " + self.url_issues)

        f = urllib.urlopen(self.url_issues)
        ticketTotal = json.loads(f.read())

        total_issues = int(ticketTotal['count'])
        total_pages = total_issues / issues_per_query
        print("Number of tickets: " + str(total_issues))

        if total_issues == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)
        remaining = total_issues

        print "ETA ", (total_issues * Config.delay) / (60), "m (", (total_issues * Config.delay) / (60 * 60), "h)"

        while start_page <= total_pages:
            self.url_issues = Config.url + "/search/?limit=" + str(issues_per_query)
            self.url_issues += "&page=" + str(start_page) + "&q="
            # A time range with all the tickets
            self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]")
            # Order by mod_date_dt desc
            self.url_issues += "&sort=mod_date_dt+asc"

            printdbg("URL for next issues " + self.url_issues)

            f = urllib.urlopen(self.url_issues)

            ticketList = json.loads(f.read())

            bugs = []
            for ticket in ticketList["tickets"]:
                bugs.append(ticket["ticket_num"])

            for bug in bugs:
                try:
                    issue_url = Config.url + "/" + str(bug)
                    issue_data = self.analyze_bug(issue_url)
                    if issue_data is None:
                        continue
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (remaining) * Config.delay / 60, "m"
                    time.sleep(self.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug " + issue_url)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                             % (issue_data.issue))
Ejemplo n.º 20
0
    def run(self):
        """Fetch Launchpad bugs for a project (or each sub-project of a meta
        project) and store them in the database."""
        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        bugsdb = get_database(DBLaunchpadBackend())

        printdbg(url)

        # launchpad needs a temp directory to store cached data
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        cachedir = os.path.join(homedir, ".cache/bicho/")
        if not os.path.exists(cachedir):
            os.makedirs(cachedir)
        cre_file = os.path.join(cachedir + 'launchpad-credential')
        self.lp = Launchpad.login_with('Bicho', 'production',
                                       credentials_file=cre_file)

        # All task statuses to search for, including the closed ones.
        aux_status = ["New", "Incomplete", "Opinion", "Invalid", "Won't Fix",
                      "Expired", "Confirmed", "Triaged", "In Progress",
                      "Fix Committed", "Fix Released",
                      "Incomplete (with response)",
                      "Incomplete (without response)"]

        # Check whether the project is a meta project
        lp_project = self.lp.projects[pname]

        if hasattr(lp_project, 'projects'):
            projects = [p for p in lp_project.projects]
        else:
            projects = [lp_project]

        printdbg("%s projects to analyze" % len(projects))

        # Still useless - insert meta project
        bugsdb.insert_supported_traker("launchpad", "x.x")
        trk = Tracker(url, "launchpad", "x.x")
        dbtrk = bugsdb.insert_tracker(trk)

        for p in projects:
            # Each sub-project gets its own tracker row keyed by its
            # bugs.* web link.
            trk_url = p.web_link.replace('://', '://bugs.')
            trk = Tracker(trk_url, "launchpad", "x.x")
            dbtrk = bugsdb.insert_tracker(trk)

            last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

            # Incremental run: only fetch bugs modified since the last one
            # stored for this tracker.
            if last_mod_date:
                bugs = p.searchTasks(status=aux_status,
                                     omit_duplicates=False,
                                     order_by='date_last_updated',
                                     modified_since=last_mod_date)
            else:
                bugs = p.searchTasks(status=aux_status,
                                     omit_duplicates=False,
                                     order_by='date_last_updated')

            printdbg("Last bug already cached: %s" % last_mod_date)

            nbugs = len(bugs)

            if nbugs == 0:
                printout("No bugs found on %s" % p.name)
                continue
            else:
                printout("%s bugs found on %s" % (nbugs, p.name))

            self.analyze_project_bugs(bugs, dbtrk, bugsdb)
Ejemplo n.º 21
0
    def run(self):
        """Fetch all Launchpad bugs for the configured project and store them,
        switching trackers per bug when the bug comes from a meta-tracker."""
        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        bugsdb = get_database(DBLaunchpadBackend())

        printdbg(url)

        # launchpad needs a temp directory to store cached data
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        cachedir = os.path.join(homedir, ".cache/bicho/")
        if not os.path.exists(cachedir):
            os.makedirs(cachedir)
        cre_file = os.path.join(cachedir + 'launchpad-credential')
        self.lp = Launchpad.login_with('Bicho', 'production',
                                       credentials_file=cre_file)

        # Every task status of interest, including the closed ones.
        aux_status = ["New", "Incomplete", "Opinion", "Invalid", "Won't Fix",
                      "Expired", "Confirmed", "Triaged", "In Progress",
                      "Fix Committed", "Fix Released",
                      "Incomplete (with response)",
                      "Incomplete (without response)"]

        last_mod_date = bugsdb.get_last_modification_date()

        # Incremental run: only bugs modified since the newest stored one.
        if last_mod_date:
            bugs = self.lp.projects[pname].searchTasks(status=aux_status,
                                                       omit_duplicates=False,
                                                       order_by='date_last_updated',
                                                       modified_since=last_mod_date)
        else:
            bugs = self.lp.projects[pname].searchTasks(status=aux_status,
                                                       omit_duplicates=False,
                                                       order_by='date_last_updated')
        printdbg("Last bug already cached: %s" % last_mod_date)

        nbugs = len(bugs)

        # still useless
        bugsdb.insert_supported_traker("launchpad", "x.x")
        trk = Tracker(url, "launchpad", "x.x")
        dbtrk = bugsdb.insert_tracker(trk)
        #

        if nbugs == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        analyzed = []

        for bug in bugs:

            # Skip duplicates already seen in this run.
            if bug.web_link in analyzed:
                continue  # for the bizarre error #338

            try:
                issue_data = self.analyze_bug(bug)
            except Exception:
                #FIXME it does not handle the e
                printerr("Error in function analyzeBug with URL: ' \
                '%s and Bug: %s" % (url, bug))
                raise

            try:
                # we can have meta-trackers but we want to have the original
                #tracker name
                tr_url = self.__get_tracker_url_from_bug(bug)
                if (tr_url != url):
                    aux_trk = Tracker(tr_url, "launchpad", "x.x")
                    dbtrk = bugsdb.insert_tracker(aux_trk)
                bugsdb.insert_issue(issue_data, dbtrk.id)
            except UnicodeEncodeError:
                printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except Exception, e:
                printerr("Unexpected Error: the issue %s couldn't be stored"
                         % (issue_data.issue))
                print e

            analyzed.append(bug.web_link)  # for the bizarre error #338
            time.sleep(self.delay)
Ejemplo n.º 22
0
    def run(self):
        """Fetch Redmine issues page by page (25 per page, fixed) and store
        them, stopping on an empty page or when a page repeats.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        # redmine 1.0 support
        last_page = 1
        tickets_page = 25  # fixed redmine

        bugs = []
        bugsdb = get_database(DBRedmineBackend())

        # still useless in redmine
        bugsdb.insert_supported_traker("redmine", "beta")
        trk = Tracker(Config.url, "redmine", "beta")
        dbtrk = bugsdb.insert_tracker(trk)

        # Incremental run: restrict the issue query to new modifications.
        updated_on = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)
        self.url_issues = self._get_issues_url(updated_on)
        url = self.url_issues + "&page=" + str(last_page)
        request = urllib2.Request(url)

        # Optional HTTP basic auth from the configured credentials.
        if self.backend_user:
            base64string = base64.encodestring(
                '%s:%s' %
                (Config.backend_user, Config.backend_password)).replace(
                    '\n', '')
            request.add_header("Authorization", "Basic %s" % base64string)

        # Get statuses
        self._get_statuses()

        f = urllib2.urlopen(request)
        tickets = json.loads(f.read())

        if not tickets["issues"]:
            printout("Done. No new bugs to analyze")
            return

        for ticket in tickets["issues"]:
            issue = self.analyze_bug(ticket)
            bugsdb.insert_issue(issue, dbtrk.id)
            time.sleep(self.delay)

        # First id of page 1; used below to detect a repeated page.
        last_ticket = tickets["issues"][0]['id']

        while True:
            last_page += 1
            url = self.url_issues + "&page=" + str(last_page)
            request = urllib2.Request(url)
            #base64string = base64.encodestring('%s:%s' % (Config.backend_user, Config.backend_password)).replace('\n', '')
            #request.add_header("Authorization", "Basic %s" % base64string)
            f = urllib2.urlopen(request)
            tickets = json.loads(f.read())

            # Stop on an empty page...
            if len(tickets['issues']) == 0:
                break

            pprint.pprint("Tickets read: " + str(tickets["issues"][0]['id']) +
                          " " + str(tickets["issues"][-1]['id']))

            # ...or when the server keeps returning the same first ticket.
            if tickets["issues"][0]['id'] == last_ticket:
                break

            for ticket in tickets["issues"]:
                issue = self.analyze_bug(ticket)
                bugsdb.insert_issue(issue, dbtrk.id)
                time.sleep(self.delay)

        pprint.pprint("Total pages: " + str(last_page))

        printout("Done. Bugs analyzed:" + str(last_page * tickets_page))
Ejemplo n.º 23
0
    def run(self):

        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        bugsdb = get_database(DBLaunchpadBackend())

        printdbg(url)

        # launchpad needs a temp directory to store cached data
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        cachedir = os.path.join(homedir, ".cache/bicho/")
        if not os.path.exists(cachedir):
            os.makedirs(cachedir)
        cre_file = os.path.join(cachedir + 'launchpad-credential')
        self.lp = Launchpad.login_with('Bicho',
                                       'production',
                                       credentials_file=cre_file)

        aux_status = [
            "New", "Incomplete", "Opinion", "Invalid", "Won't Fix", "Expired",
            "Confirmed", "Triaged", "In Progress", "Fix Committed",
            "Fix Released", "Incomplete (with response)",
            "Incomplete (without response)"
        ]

        # still useless
        bugsdb.insert_supported_traker("launchpad", "x.x")
        trk = Tracker(url, "launchpad", "x.x")
        dbtrk = bugsdb.insert_tracker(trk)

        last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

        if last_mod_date:
            bugs = self.lp.projects[pname].searchTasks(
                status=aux_status,
                omit_duplicates=False,
                order_by='date_last_updated',
                modified_since=last_mod_date)
        else:
            bugs = self.lp.projects[pname].searchTasks(
                status=aux_status,
                omit_duplicates=False,
                order_by='date_last_updated')
        printdbg("Last bug already cached: %s" % last_mod_date)

        nbugs = len(bugs)

        if nbugs == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        analyzed = []

        for bug in bugs:

            if bug.web_link in analyzed:
                continue  # for the bizarre error #338

            try:
                issue_data = self.analyze_bug(bug)
            except Exception:
                #FIXME it does not handle the e
                printerr("Error in function analyzeBug with URL: ' \
                '%s and Bug: %s" % (url, bug))
                raise

            try:
                # we can have meta-trackers but we want to have the original
                #tracker name
                tr_url = self.__get_tracker_url_from_bug(bug)
                if (tr_url != url):
                    aux_trk = Tracker(tr_url, "launchpad", "x.x")
                    dbtrk = bugsdb.insert_tracker(aux_trk)
                bugsdb.insert_issue(issue_data, dbtrk.id)
            except UnicodeEncodeError:
                printerr(
                    "UnicodeEncodeError: the issue %s couldn't be stored" %
                    (issue_data.issue))
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored" %
                         (issue_data.issue))
            except Exception, e:
                printerr("Unexpected Error: the issue %s couldn't be stored" %
                         (issue_data.issue))
                print e

            analyzed.append(bug.web_link)  # for the bizarre error #338
            time.sleep(self.delay)
Ejemplo n.º 24
0
Archivo: taiga.py Proyecto: acs/Bicho
    def run(self):
        """Fetch Taiga issues, tasks and user stories and store them.

        Authenticates against the Taiga REST API with a bearer token and
        downloads each item type in a single request (pagination is
        disabled through the 'x-disable-pagination' header).  Each item
        type is stored under its own tracker.
        """
        logging.info("Running Bicho with delay of %s seconds" % (str(self.delay)))

        bugsdb = get_database(DBTaigaBackend())

        self.url_api = Config.url+"/api/v1"
        bugsdb.insert_supported_traker("taigaIssues", "beta")
        bugsdb.insert_supported_traker("taigaTasks", "beta")
        bugsdb.insert_supported_traker("taigaUserstories", "beta")
        dbtrk_issues = bugsdb.insert_tracker(Tracker(self.url_api+"/issues", "taigaIssues", "beta"))
        dbtrk_tasks = bugsdb.insert_tracker(Tracker(self.url_api+"/tasks", "taigaTasks", "beta"))
        dbtrk_userstories = bugsdb.insert_tracker(Tracker(self.url_api+"/userstories", "taigaUserstories", "beta"))

        self.url_issues = self.url_api + "/issues"
        self.url_tasks = self.url_api + "/tasks"
        self.url_userstories = self.url_api + "/userstories"
        self.url_users = self.url_api + "/users"
        self.url_auth = self.url_api + "/auth"
        self.url_history_issue = self.url_api + "/history/issue/"
        self.url_history_task = self.url_api + "/history/task/"
        self.url_history_userstory = self.url_api + "/history/userstory/"
        logging.info("URL for getting issues " + self.url_issues)

        # SECURITY: hard-coded credential committed to the source; it
        # should be read from the configuration instead, and this value
        # must be revoked.
        auth_token = "eyJ1c2VyX2F1dGhlbnRpY2F0aW9uX2lkIjoyfQ:1XrNRx:fLCEb_ZV4A8jsY9NLbZ7i9MtXMo"

        # Authentication and get all tickets without pagination
        # TODO: support pagination
        headers = {}
        headers["Authorization"] = "Bearer " + auth_token
        # NOTE(review): a boolean header value relies on implicit string
        # conversion -- confirm the server accepts "True" here.
        headers["x-disable-pagination"] = True

        # Get users info in order to change identifiers with real names
        try:
            request = urllib2.Request(self.url_users, headers=headers)
            f = urllib2.urlopen(request)
            users = json.loads(f.read())
            self.users = users
        except urllib2.HTTPError:
            logging.info("You don't have permissions to get user info.")
            self.users = None

        # Now we need issues, tasks and user stories
        request = urllib2.Request(self.url_issues, headers=headers)
        f = urllib2.urlopen(request)
        issues = json.loads(f.read())
        request = urllib2.Request(self.url_tasks, headers=headers)
        f = urllib2.urlopen(request)
        tasks = json.loads(f.read())
        request = urllib2.Request(self.url_userstories, headers=headers)
        f = urllib2.urlopen(request)
        userstories = json.loads(f.read())

        total_issues = len(issues)
        print("Number of tickets: " + str(total_issues))
        print("Number of tasks: " + str(len(tasks)))
        print("Number of user stories: " + str(len(userstories)))

        if total_issues == 0:
            logging.info("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        nissues = self.parse_issues(issues, self.url_history_issue, auth_token, bugsdb, dbtrk_issues.id)
        logging.info("Done. Issues analyzed:" + str(nissues))
        ntasks = self.parse_issues(tasks, self.url_history_task, auth_token, bugsdb, dbtrk_tasks.id)
        logging.info("Done. Tasks analyzed:" + str(ntasks))
        nuserstories = self.parse_issues(userstories, self.url_history_userstory, auth_token, bugsdb, dbtrk_userstories.id)
        logging.info("Done. User stories analyzed:" + str(nuserstories))
Ejemplo n.º 25
0
    def run(self):
        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        bugsdb = get_database(DBGithubBackend())

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        printdbg(url)

        bugsdb.insert_supported_traker("github", "v3")
        trk = Tracker(url, "github", "v3")
        dbtrk = bugsdb.insert_tracker(trk)

        self.bugs_state = ALL_STATES
        self.pagecont = 1
        self.mod_date = None

        aux_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

        if aux_date:
            self.mod_date = aux_date.isoformat()
            printdbg("Last issue already cached: %s" % self.mod_date)

        try:
            bugs = self.__get_batch_bugs()
        except GitHubRateLimitReached:
            printout("GitHub rate limit reached. To resume, wait some minutes.")
            sys.exit(0)

        nbugs = len(bugs)

        if len(bugs) == 0:
            if aux_date:
                printout("Bicho database up to date")
            else:
                printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        auxcont = 0
        while len(bugs) > 0:

            for bug in bugs:
                try:
                    issue_data = self.analyze_bug(bug)
                except GitHubRateLimitReached:
                    printout("GitHub rate limit reached. To resume, wait some minutes.")
                    sys.exit(0)
                except Exception:
                    #FIXME it does not handle the e
                    msg = "Error in function analyzeBug with URL: %s and bug: %s" % (url, bug)
                    printerr(msg)
                    raise

                try:
                    # we can have meta-trackers but we want to have the
                    # original tracker name
                    tr_url = self.__get_tracker_url_from_bug(bug)
                    if (tr_url != url):
                        aux_trk = Tracker(tr_url, "github", "v3")
                        dbtrk = bugsdb.insert_tracker(aux_trk)
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored"
                        % (issue_data.issue))
                except Exception, e:
                    printerr("ERROR: ")
                    print e

                printdbg ("Getting ticket number " + str(bug["number"]))
                time.sleep(self.delay)

            self.pagecont += 1

            try:
                bugs = self.__get_batch_bugs()
            except GitHubRateLimitReached:
                printout("GitHub rate limit reached. To resume, wait some minutes.")
                sys.exit(0)

            nbugs = nbugs + len(bugs)
Ejemplo n.º 26
0
    def run(self):
        """Fetch all Redmine issues page by page and store them.

        Pages of 25 issues (the fixed Redmine page size) are requested
        until an empty page -- or a page starting with an already-seen
        ticket id -- signals the end.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        # redmine 1.0 support
        last_page = 1
        tickets_page = 25  # fixed redmine page size

        bugsdb = get_database(DBRedmineBackend())

        # still useless in redmine
        bugsdb.insert_supported_traker("redmine", "beta")
        trk = Tracker(Config.url, "redmine", "beta")
        dbtrk = bugsdb.insert_tracker(trk)

        updated_on = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)
        self.url_issues = self._get_issues_url(updated_on)
        url = self.url_issues + "&page=" + str(last_page)
        request = urllib2.Request(url)

        if self.backend_user:
            base64string = base64.encodestring('%s:%s' % (Config.backend_user, Config.backend_password)).replace('\n', '')
            request.add_header("Authorization", "Basic %s" % base64string)

        # Get statuses
        self._get_statuses()

        f = urllib2.urlopen(request)
        tickets = json.loads(f.read())

        # BUGFIX: guard against an empty first page; indexing
        # tickets["issues"][0] below raised IndexError when there was
        # nothing new to analyze.
        if not tickets["issues"]:
            printout("Done. No new bugs to analyze")
            return

        for ticket in tickets["issues"]:
            issue = self.analyze_bug(ticket)
            bugsdb.insert_issue(issue, dbtrk.id)
            time.sleep(self.delay)

        last_ticket = tickets["issues"][0]['id']

        while True:
            last_page += 1
            url = self.url_issues + "&page=" + str(last_page)
            # NOTE(review): follow-up pages are requested without the
            # Authorization header used for the first page -- confirm
            # whether authenticated trackers work past page 1.
            request = urllib2.Request(url)
            f = urllib2.urlopen(request)
            tickets = json.loads(f.read())

            if len(tickets['issues']) == 0:
                break

            pprint.pprint("Tickets read: " + str(tickets["issues"][0]['id']) + " " + str(tickets["issues"][-1]['id']))

            # an out-of-range page repeats the first ticket already seen,
            # which means we are done
            if tickets["issues"][0]['id'] == last_ticket:
                break

            for ticket in tickets["issues"]:
                issue = self.analyze_bug(ticket)
                bugsdb.insert_issue(issue, dbtrk.id)
                time.sleep(self.delay)

        pprint.pprint("Total pages: " + str(last_page))

        printout("Done. Bugs analyzed:" + str(last_page * tickets_page))
Ejemplo n.º 27
0
    def run(self):
        """
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        # limit=-1 is NOT recognized as 'all'.  500 is a reasonable limit. - allura code
        issues_per_query = 500
        start_page = 0

        bugs = []
        bugsdb = get_database(DBAlluraBackend())

        # still useless in allura
        bugsdb.insert_supported_traker("allura", "beta")
        trk = Tracker(Config.url, "allura", "beta")
        dbtrk = bugsdb.insert_tracker(trk)

        last_mod_date = bugsdb.get_last_modification_date()

        # Date before the first ticket
        time_window_start = "1900-01-01T00:00:00Z"
        time_window_end = datetime.now().isoformat() + "Z"

        if last_mod_date:
            time_window_start = last_mod_date
            printdbg("Last bugs analyzed were modified on: %s" % last_mod_date)

        time_window = time_window_start + " TO  " + time_window_end

        self.url_issues = Config.url + "/search/?limit=1"
        self.url_issues += "&q="
        # A time range with all the tickets
        self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]")
        printdbg("URL for getting metadata " + self.url_issues)

        f = urllib.urlopen(self.url_issues)
        ticketTotal = json.loads(f.read())

        total_issues = int(ticketTotal['count'])
        total_pages = total_issues / issues_per_query
        print("Number of tickets: " + str(total_issues))

        if total_issues == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)
        remaining = total_issues

        print "ETA ", (total_issues * Config.delay) / (60), "m (", (
            total_issues * Config.delay) / (60 * 60), "h)"

        while start_page <= total_pages:
            self.url_issues = Config.url + "/search/?limit=" + str(
                issues_per_query)
            self.url_issues += "&page=" + str(start_page) + "&q="
            # A time range with all the tickets
            self.url_issues += urllib.quote("mod_date_dt:[" + time_window +
                                            "]")
            # Order by mod_date_dt desc
            self.url_issues += "&sort=mod_date_dt+asc"

            printdbg("URL for next issues " + self.url_issues)

            f = urllib.urlopen(self.url_issues)

            ticketList = json.loads(f.read())

            bugs = []
            for ticket in ticketList["tickets"]:
                bugs.append(ticket["ticket_num"])

            for bug in bugs:
                try:
                    issue_url = Config.url + "/" + str(bug)
                    issue_data = self.analyze_bug(issue_url)
                    if issue_data is None:
                        continue
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (
                        remaining) * Config.delay / 60, "m"
                    time.sleep(self.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug " + issue_url)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored" %
                        (issue_data.issue))
Ejemplo n.º 28
0
    def run(self):
        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        bugsdb = get_database(DBGithubBackend())

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        printdbg(url)

        bugsdb.insert_supported_traker("github", "v3")
        trk = Tracker(url, "github", "v3")
        dbtrk = bugsdb.insert_tracker(trk)

        self.bugs_state = ALL_STATES
        self.pagecont = 1
        self.mod_date = None

        aux_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

        if aux_date:
            self.mod_date = aux_date.isoformat()
            printdbg("Last issue already cached: %s" % self.mod_date)

        try:
            bugs = self.__get_batch_bugs()
        except GitHubRateLimitReached:
            printout(
                "GitHub rate limit reached. To resume, wait some minutes.")
            sys.exit(0)

        nbugs = len(bugs)

        if len(bugs) == 0:
            if aux_date:
                printout("Bicho database up to date")
            else:
                printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        auxcont = 0
        while len(bugs) > 0:

            for bug in bugs:
                try:
                    issue_data = self.analyze_bug(bug)
                except GitHubRateLimitReached:
                    printout(
                        "GitHub rate limit reached. To resume, wait some minutes."
                    )
                    sys.exit(0)
                except Exception:
                    #FIXME it does not handle the e
                    msg = "Error in function analyzeBug with URL: %s and bug: %s" % (
                        url, bug)
                    printerr(msg)
                    raise

                try:
                    # we can have meta-trackers but we want to have the
                    # original tracker name
                    tr_url = self.__get_tracker_url_from_bug(bug)
                    if (tr_url != url):
                        aux_trk = Tracker(tr_url, "github", "v3")
                        dbtrk = bugsdb.insert_tracker(aux_trk)
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored" %
                        (issue_data.issue))
                except Exception, e:
                    printerr("ERROR: ")
                    print e

                printdbg("Getting ticket number " + str(bug["number"]))
                time.sleep(self.delay)

            self.pagecont += 1

            try:
                bugs = self.__get_batch_bugs()
            except GitHubRateLimitReached:
                printout(
                    "GitHub rate limit reached. To resume, wait some minutes.")
                sys.exit(0)

            nbugs = nbugs + len(bugs)
Ejemplo n.º 29
0
    def run(self):
        """Collect bugs from a Launchpad project.

        When the configured URL points at a meta project, every
        sub-project is walked and stored under its own tracker entry;
        otherwise only the single project is processed.  Per-bug work is
        delegated to analyze_project_bugs().
        """
        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        base_url = self.url
        project_name = self.__get_project_from_url()

        bugsdb = get_database(DBLaunchpadBackend())

        printdbg(base_url)

        # launchpadlib keeps credentials and cached data on disk, so the
        # cache directory has to exist before logging in
        home = pwd.getpwuid(os.getuid()).pw_dir
        cache_path = os.path.join(home, ".cache/bicho/")
        if not os.path.exists(cache_path):
            os.makedirs(cache_path)
        credentials = os.path.join(cache_path + "launchpad-credential")
        self.lp = Launchpad.login_with("Bicho", "production", credentials_file=credentials)

        # Every status a bug task may have; searchTasks() would otherwise
        # return only the open ones.
        status_filter = [
            "New",
            "Incomplete",
            "Opinion",
            "Invalid",
            "Won't Fix",
            "Expired",
            "Confirmed",
            "Triaged",
            "In Progress",
            "Fix Committed",
            "Fix Released",
            "Incomplete (with response)",
            "Incomplete (without response)",
        ]

        # Check whether the project is a meta project
        meta = self.lp.projects[project_name]

        if hasattr(meta, "projects"):
            projects = list(meta.projects)
        else:
            projects = [meta]

        printdbg("%s projects to analyze" % len(projects))

        # Still useless - insert meta project
        bugsdb.insert_supported_traker("launchpad", "x.x")
        dbtrk = bugsdb.insert_tracker(Tracker(base_url, "launchpad", "x.x"))

        for project in projects:
            bug_tracker_url = project.web_link.replace("://", "://bugs.")
            dbtrk = bugsdb.insert_tracker(Tracker(bug_tracker_url, "launchpad", "x.x"))

            last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

            search_args = {
                "status": status_filter,
                "omit_duplicates": False,
                "order_by": "date_last_updated",
            }
            if last_mod_date:
                # incremental run: only bugs touched since the stored date
                search_args["modified_since"] = last_mod_date

            bugs = project.searchTasks(**search_args)

            printdbg("Last bug already cached: %s" % last_mod_date)

            found = len(bugs)

            if found == 0:
                printout("No bugs found on %s" % project.name)
                continue

            printout("%s bugs found on %s" % (found, project.name))

            self.analyze_project_bugs(bugs, dbtrk, bugsdb)
Ejemplo n.º 30
0
    def run(self):
        """
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        issues_per_query = 250
        start_issue = 1

        bugs = []
        bugsdb = get_database(DBGoogleCodeBackend())

        # still useless
        bugsdb.insert_supported_traker("googlecode", "beta")
        trk = Tracker(Config.url, "googlecode", "beta")

        dbtrk = bugsdb.insert_tracker(trk)

        self.url = Config.url

        #  https://code.google.com/feeds/issues/p/mobile-time-care
        self.url_issues = Config.url + "/issues/full?max-results=1"
        printdbg("URL for getting metadata " + self.url_issues)

        d = feedparser.parse(self.url_issues)

        total_issues = int(d['feed']['opensearch_totalresults'])
        print "Total bugs: ", total_issues
        if total_issues == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)
        remaining = total_issues

        print "ETA ", (total_issues * Config.delay) / (60), "m (", (
            total_issues * Config.delay) / (60 * 60), "h)"

        while start_issue < total_issues:
            self.url_issues = Config.url + "/issues/full?max-results=" + str(
                issues_per_query)
            self.url_issues += "&start-index=" + str(start_issue)

            printdbg("URL for next issues " + self.url_issues)

            d = feedparser.parse(self.url_issues)

            for entry in d['entries']:
                try:
                    issue = self.analyze_bug(entry)
                    if issue is None:
                        continue
                    bugsdb.insert_issue(issue, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (
                        remaining) * Config.delay / 60, "m", " issues ", str(
                            remaining)
                    time.sleep(Config.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug ")
                    pprint.pprint(entry)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored" %
                        (issue.issue))