示例#1
0
    def fetch_and_store_tickets(self):
        """Download every ticket modified since the last run and store it."""
        printdbg("Fetching tickets")

        # Register the tracker first: issues are keyed on its id.
        dbtrk = self.insert_tracker(self.url)

        last_mod_date = self.db.get_last_modification_date(tracker_id=dbtrk.id)
        if last_mod_date:
            printdbg("Last modification date stored: %s" % last_mod_date)

        trac_tickets = self.trac_rpc.tickets(last_mod_date)

        nbugs = 0
        for ticket_id in trac_tickets:
            printdbg("Fetching ticket %s" % str(ticket_id))
            issue = self.get_issue_from_ticket(self.trac_rpc.ticket(ticket_id))
            self.db.insert_issue(issue, dbtrk.id)
            nbugs += 1
            # Be polite to the remote server between requests.
            time.sleep(self.delay)

        printout("Done. %s bugs analyzed from %s" % (nbugs, len(trac_tickets)))
示例#2
0
    def fetch_and_store_tickets(self):
        """Retrieve tickets changed since the last stored date and persist them."""
        printdbg("Fetching tickets")

        # Tracker row for this URL; new issues hang off its id.
        dbtrk = self.insert_tracker(self.url)
        last_mod_date = self.db.get_last_modification_date(tracker_id=dbtrk.id)

        if last_mod_date:
            printdbg("Last modification date stored: %s" % last_mod_date)

        trac_tickets = self.trac_rpc.tickets(last_mod_date)

        nbugs = 0
        for tid in trac_tickets:
            printdbg("Fetching ticket %s" % str(tid))
            ticket = self.trac_rpc.ticket(tid)

            # Convert the raw RPC payload and store it.
            self.db.insert_issue(self.get_issue_from_ticket(ticket), dbtrk.id)
            nbugs += 1

            # Throttle requests against the remote tracker.
            time.sleep(self.delay)

        printout("Done. %s bugs analyzed from %s" % (nbugs, len(trac_tickets)))
示例#3
0
文件: atljira.py 项目: hjmacho/Bicho
    def run(self):
        """
        Fetch a Jira issue through the JIRA client library and analyze it.

        NOTE(review): the locals `serverUrl`, `query` and `bugs_number` are
        computed but never used in this excerpt, and the client is built
        from `self.serverUrl` rather than the local `serverUrl` — confirm
        that attribute is set elsewhere on this backend.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        issues_per_query = 100

        bugsdb = get_database(DBJiraBackend())

        # Register the tracker; the URL prefix before the dash is its key.
        bugsdb.insert_supported_traker("jira", "4.1.2")
        trk = Tracker(self.url.split("-")[0], "jira", "4.1.2")
        dbtrk = bugsdb.insert_tracker(trk)

        # "<server>/browse/<PROJECT-ID>" split into its components.
        serverUrl = self.url.split("/browse/")[0]
        query = "/si/jira.issueviews:issue-xml/"
        project = self.url.split("/browse/")[1]

        options_jira = {
            'server': self.serverUrl
        }

        jira = JIRA(options_jira)

        # A dash in the path ("PROJ-123") means a single issue was requested.
        if (project.split("-").__len__() > 1):
            bug_key = project
            project = project.split("-")[0]
            bugs_number = self.bugsNumber(jira)

            try:
                issue = jira.issue(bug_key,expand='changelog')
                self.analyze_bug_list(issue, self.serverUrl+'/browse/', bugsdb, dbtrk.id)
            except Exception, e:
                #printerr(e)
                print(e)
示例#4
0
    def run(self):
        """
        Fetch a single Jira issue as XML and store it in the database.

        Logs in, registers the tracker, then — when the configured URL
        names a specific issue key ("PROJ-123") — downloads that issue's
        XML view and inserts the parsed result.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        self.conn.login(self.url, self.backend_user, self.backend_password)

        bugsdb = get_database(DBJiraBackend())

        bugsdb.insert_supported_traker("jira", "4.1.2")
        trk = Tracker(self.url.split("-")[0], "jira", "4.1.2")
        dbtrk = bugsdb.insert_tracker(trk)

        # Split "<server>/browse/<PROJECT-ID>" into server, XML view path
        # and the project/issue part.
        serverUrl = self.url.split("/browse/")[0]
        query = "/si/jira.issueviews:issue-xml/"
        project = self.url.split("/browse/")[1]

        # A dash in the path means a single issue key was requested.
        if (project.split("-").__len__() > 1):
            bug_key = project
            project = project.split("-")[0]
            bugs_number = 1

            printdbg(serverUrl + query + bug_key + "/" + bug_key + ".xml")

            # SAX-parse the per-issue XML export into Issue objects.
            parser = xml.sax.make_parser()
            handler = BugsHandler()
            parser.setContentHandler(handler)
            try:
                parser.parse(serverUrl + query + bug_key + "/" + bug_key +
                             ".xml")
                issue = handler.getIssues(self.conn)[0]
                bugsdb.insert_issue(issue, dbtrk.id)
            except Exception, e:
                #printerr(e)
                print(e)
示例#5
0
文件: jira.py 项目: acs/Bicho
    def login(self, url, user=None, password=None):
        """
        Authenticates a user in a Jira tracker.

        Sends a Basic-Auth request to `url` and keeps the returned
        session cookies in self.cookies. Does nothing when no
        credentials are given.
        """
        if not (user and password):
            printout("No account data provided. Not logged in Jira")
            return

        import cookielib

        cookie_j = cookielib.CookieJar()
        cookie_h = urllib2.HTTPCookieProcessor(cookie_j)

        auth_info = user + ':' + password
        auth_info = auth_info.replace('\n', '')
        # base64.encodestring() appends newline(s); strip them, otherwise
        # the Authorization header value is malformed (headers must be a
        # single line).
        base64string = base64.encodestring(auth_info).replace('\n', '')

        request = urllib2.Request(url)
        request.add_header("Authorization", "Basic %s" % base64string)

        opener = urllib2.build_opener(cookie_h)
        urllib2.install_opener(opener)

        urllib2.urlopen(request)
        # Keep the session cookies for later requests.
        for c in cookie_j:
            self.cookies[c.name] = c.value

        printout("Logged in Jira as %s" % user)
        printdbg("Jira session cookies: %s" % self.cookies)
示例#6
0
    def login(self, url, user=None, password=None):
        """
        Authenticates a user in a Jira tracker.

        Issues a Basic-Auth request against `url`, storing the session
        cookies in self.cookies. A missing user or password skips the
        login entirely.
        """
        if not (user and password):
            printout("No account data provided. Not logged in Jira")
            return

        import cookielib

        cookie_j = cookielib.CookieJar()
        cookie_h = urllib2.HTTPCookieProcessor(cookie_j)

        auth_info = user + ':' + password
        auth_info = auth_info.replace('\n', '')
        # encodestring() inserts trailing newline(s); remove them so the
        # "Authorization" header stays on a single line.
        base64string = base64.encodestring(auth_info).replace('\n', '')

        request = urllib2.Request(url)
        request.add_header("Authorization", "Basic %s" % base64string)

        opener = urllib2.build_opener(cookie_h)
        urllib2.install_opener(opener)

        urllib2.urlopen(request)
        # Remember the session cookies for subsequent requests.
        for c in cookie_j:
            self.cookies[c.name] = c.value

        printout("Logged in Jira as %s" % user)
        printdbg("Jira session cookies: %s" % self.cookies)
示例#7
0
    def _login(self):
        """
        Authenticates a user in a bugzilla tracker.

        POSTs the configured credentials to the tracker's login page and
        keeps the returned session cookies in self.cookies. Skipped when
        no credentials were provided.
        """
        if not (self.backend_user and self.backend_password):
            printdbg("No account data provided. Not logged in bugzilla")
            return

        import cookielib

        cookie_j = cookielib.CookieJar()
        cookie_h = urllib2.HTTPCookieProcessor(cookie_j)

        url = self._get_login_url(self.url)
        values = {'Bugzilla_login': self.backend_user,
                  'Bugzilla_password': self.backend_password}

        opener = urllib2.build_opener(cookie_h)
        urllib2.install_opener(opener)
        data = urllib.urlencode(values)
        request = urllib2.Request(url, data)
        urllib2.urlopen(request)
        # The enumerate() index was never used; iterate the jar directly.
        for c in cookie_j:
            self.cookies[c.name] = c.value

        printout("Logged in bugzilla as %s" % self.backend_user)
        printdbg("Bugzilla session cookies: %s" % self.cookies)
示例#8
0
文件: jira.py 项目: adamlofting/Bicho
    def run(self):
        """
        Fetch a single Jira issue via the XML export and store it.

        Registers the tracker, then — when the configured URL names a
        specific issue key ("PROJ-123") — downloads the issue's XML view,
        parses it with SAX and inserts the resulting issue.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        issues_per_xml_query = 500
        bugsdb = get_database(DBJiraBackend())

        bugsdb.insert_supported_traker("jira", "4.1.2")
        trk = Tracker(self.url.split("-")[0], "jira", "4.1.2")
        dbtrk = bugsdb.insert_tracker(trk)

        # Split "<server>/browse/<PROJECT-ID>" into its components.
        serverUrl = self.url.split("/browse/")[0]
        query = "/si/jira.issueviews:issue-xml/"
        project = self.url.split("/browse/")[1]

        # A dash in the path means a single issue key was requested.
        if (project.split("-").__len__() > 1):
            bug_key = project
            project = project.split("-")[0]
            bugs_number = 1

            printdbg(serverUrl + query + bug_key + "/" + bug_key + ".xml")

            parser = xml.sax.make_parser()
            handler = BugsHandler()
            parser.setContentHandler(handler)
            try:
                parser.parse(serverUrl + query + bug_key + "/" + bug_key + ".xml")
                issue = handler.getIssues()[0]
                bugsdb.insert_issue(issue, dbtrk.id)
            except Exception, e:
                #printerr(e)
                print(e)
示例#9
0
    def _login(self):
        """
        Authenticates a user in a bugzilla tracker.

        Posts Bugzilla_login/Bugzilla_password to the tracker's login URL
        and stores the session cookies in self.cookies. Does nothing if
        credentials were not configured.
        """
        if not (self.backend_user and self.backend_password):
            printdbg("No account data provided. Not logged in bugzilla")
            return

        import cookielib

        cookie_j = cookielib.CookieJar()
        cookie_h = urllib2.HTTPCookieProcessor(cookie_j)

        url = self._get_login_url(self.url)
        values = {
            'Bugzilla_login': self.backend_user,
            'Bugzilla_password': self.backend_password
        }

        opener = urllib2.build_opener(cookie_h)
        urllib2.install_opener(opener)
        data = urllib.urlencode(values)
        request = urllib2.Request(url, data)
        urllib2.urlopen(request)
        # The enumerate() index was never used; iterate the jar directly.
        for c in cookie_j:
            self.cookies[c.name] = c.value

        printout("Logged in bugzilla as %s" % self.backend_user)
        printdbg("Bugzilla session cookies: %s" % self.cookies)
示例#10
0
    def run(self):
        """
        Fetch Gerrit reviews in pages ordered by lastUpdated and store them.

        Pages backwards through the review stream using Gerrit's resume
        key (`sortKey`), stopping once reviews older than the last stored
        modification date are reached.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        bugs = []
        bugsdb = get_database(DBGerritBackend())

        # still useless in gerrit
        bugsdb.insert_supported_traker("gerrit", "beta")
        trk = Tracker(Config.url + "_" + Config.gerrit_project, "gerrit", "beta")
        dbtrk = bugsdb.insert_tracker(trk)

        # Resume point: epoch seconds of the newest review already stored.
        last_mod_time = 0
        last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)
        if last_mod_date:
            printdbg("Last reviews analyzed were modified on date: %s"
                     % last_mod_date)
            last_mod_time = time.mktime(time.strptime(last_mod_date, '%Y-%m-%d %H:%M:%S'))

        limit = 500  # gerrit default 500
        last_item = ""
        # last_item = "001f672c00002f80";
        number_results = limit
        total_reviews = 0

        # Keep paging while a full page came back (wikimedia returns
        # limit+1 entries per page).
        while (number_results == limit or
               number_results == limit + 1):  # wikimedia gerrit returns limit+1
            # ordered by lastUpdated
            tickets = self.getReviews(limit, last_item)
            number_results = 0

            reviews = []
            for entry in tickets:
                if 'project' in entry.keys():
                    # Older than what is stored already: nothing new below.
                    if (entry['lastUpdated'] < last_mod_time):
                        break
                    reviews.append(entry["number"])
                    review_data = self.analyze_review(entry)

                    if review_data is None:
                        pprint.pprint("ERROR in review. Ignoring it.")
                        continue

                    # sortKey is the resume cursor for the next page.
                    last_item = entry['sortKey']
                    # extra changes not included in gerrit changes
                    # self.add_merged_abandoned_changes_from_comments(entry, review_data)
                    self.add_merged_abandoned_changes(entry, review_data)
                    self.add_uploaded_patchset_from_comments(entry, review_data)
                    self.add_new_change(review_data)
                    bugsdb.insert_issue(review_data, dbtrk.id)
                    number_results += 1
                elif 'rowCount' in entry.keys():
                    # Trailer entry emitted by Gerrit at the end of a page.
                    pprint.pprint(entry)
                    printdbg("CONTINUE FROM: " + last_item)
            total_reviews = total_reviews + int(number_results)
        self.check_merged_abandoned_changes(bugsdb.store, dbtrk.id)

        print("Done. Number of reviews: " + str(total_reviews))
示例#11
0
    def run(self):
        printout("Running Bicho with delay of %s seconds - %s" % (self.delay, self.url))

        try:
            self.fetch_and_store_tickets()
        except (requests.exceptions.HTTPError, TracRPCError), e:
            printerr("Error: %s" % e)
            sys.exit(1)
示例#12
0
    def run(self):
        printout("Running Bicho - url: %s" % self.url)

        try:
            self.fetch_and_store()
        except (requests.exceptions.HTTPError, ReviewBoardAPIError), e:
            printerr("Error: %s" % e)
            sys.exit(1)
示例#13
0
    def run(self):
        printout("Running Bicho - %s" % self.url)

        try:
            self.fetch_and_store_tickets()
        except (requests.exceptions.HTTPError, TracRPCError), e:
            printerr("Error: %s" % e)
            sys.exit(1)
示例#14
0
    def run(self):
        printout("Running Bicho - url: %s" % self.url)

        try:
            self.fetch_and_store()
        except (requests.exceptions.HTTPError, ReviewBoardAPIError), e:
            printerr("Error: %s" % e)
            sys.exit(1)
示例#15
0
    def run(self):
        """
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        issues_per_query = 250
        start_issue = 1

        bugs = []
        bugsdb = get_database(DBGoogleCodeBackend())

        # still useless
        bugsdb.insert_supported_traker("googlecode", "beta")
        trk = Tracker(Config.url, "googlecode", "beta")

        dbtrk = bugsdb.insert_tracker(trk)

        self.url = Config.url

       #  https://code.google.com/feeds/issues/p/mobile-time-care
        self.url_issues = Config.url + "/issues/full?max-results=1"
        printdbg("URL for getting metadata " + self.url_issues)

        d = feedparser.parse(self.url_issues)

        total_issues = int(d['feed']['opensearch_totalresults'])
        print "Total bugs: ", total_issues
        if total_issues == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)
        remaining = total_issues

        print "ETA ", (total_issues * Config.delay) / (60), "m (", (total_issues * Config.delay) / (60 * 60), "h)"

        while start_issue < total_issues:
            self.url_issues = Config.url + "/issues/full?max-results=" + str(issues_per_query)
            self.url_issues += "&start-index=" + str(start_issue)

            printdbg("URL for next issues " + self.url_issues)

            d = feedparser.parse(self.url_issues)

            for entry in d['entries']:
                try:
                    issue = self.analyze_bug(entry)
                    if issue is None:
                        continue
                    bugsdb.insert_issue(issue, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (remaining) * Config.delay / 60, "m", " issues ", str(remaining)
                    time.sleep(Config.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug ")
                    pprint.pprint(entry)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                             % (issue.issue))
示例#16
0
    def run(self):
        printout("Running Bicho with delay of %s seconds - %s" %
                 (self.delay, self.url))

        try:
            self.fetch_and_store_tickets()
        except (requests.exceptions.HTTPError, TracRPCError), e:
            printerr("Error: %s" % e)
            sys.exit(1)
示例#17
0
    def run(self):
        printout("Running Bicho - %s" % self.url)

        if not self.check_auth():
            sys.exit(1)

        try:
            self.fetch_and_store_tasks()
        except (requests.exceptions.HTTPError, ConduitError), e:
            printerr("Error: %s" % e)
            sys.exit(1)
示例#18
0
    def run(self):
        printout("Running Bicho - %s" % self.url)

        if not self.check_auth():
            sys.exit(1)

        try:
            self.fetch_and_store_tasks()
        except (requests.exceptions.HTTPError, ConduitError), e:
            printerr("Error: %s" % e)
            sys.exit(1)
示例#19
0
    def __init__(self):
        """Load run configuration; credentials are optional."""
        # Basic settings taken from the global Config object.
        self.delay = Config.delay
        self.identities = {}
        self.statuses = {}

        # Fall back to anonymous access when no account is configured.
        try:
            self.backend_user = Config.backend_user
            self.backend_password = Config.backend_password
        except AttributeError:
            printout("No account provided.")
            self.backend_user = None
            self.backend_password = None
示例#20
0
    def __init__(self):
        """Initialize backend state from the global Config."""
        self.statuses = {}
        self.identities = {}
        self.delay = Config.delay

        # Account data is optional; anonymous access otherwise.
        try:
            self.backend_user = Config.backend_user
            self.backend_password = Config.backend_password
        except AttributeError:
            printout("No account provided.")
            self.backend_user = None
            self.backend_password = None
示例#21
0
    def __init__(self):
        """Configure the Jira backend and open its connection helper."""
        # Run parameters from the global Config object.
        self.delay = Config.delay
        self.url = Config.url
        self.max_issues = Config.nissues

        # Credentials are optional; anonymous access otherwise.
        try:
            self.backend_user = Config.backend_user
            self.backend_password = Config.backend_password
        except AttributeError:
            printout("No account provided.")
            self.backend_user = None
            self.backend_password = None

        self.conn = JiraConnection()
示例#22
0
文件: jira.py 项目: acs/Bicho
    def __init__(self):
        """Set up the Jira backend from the global Config."""
        self.max_issues = Config.nissues
        self.url = Config.url
        self.delay = Config.delay

        # Account data is optional.
        try:
            self.backend_user = Config.backend_user
            self.backend_password = Config.backend_password
        except AttributeError:
            printout("No account provided.")
            self.backend_user = None
            self.backend_password = None

        # Shared HTTP/session helper used by the fetch code.
        self.conn = JiraConnection()
示例#23
0
文件: bg.py 项目: adamlofting/Bicho
    def __init__(self):
        """Prepare the Bugzilla backend: URL, session state and database."""
        # Normalized tracker URL plus per-run state.
        self.url = self._healthy_url(Config.url)
        self.delay = Config.delay
        self.cookies = {}
        self.version = None
        self.tracker = None
        self.retrieved = {}  # retrieved issues on this run

        # Without an account, mail addresses cannot be fetched.
        try:
            self.backend_user = Config.backend_user
            self.backend_password = Config.backend_password
        except AttributeError:
            printout("No Bugzilla account provided, mail addresses won't " +
                     "be retrieved")
            self.backend_user = None
            self.backend_password = None

        self.bugsdb = get_database(DBBugzillaBackend())
示例#24
0
    def __init__(self):
        """Prepare the Bugzilla backend: URL, limits, session state, database."""
        self.url = self._healthy_url(Config.url)
        self.delay = Config.delay
        self.max_issues = Config.nissues

        # Per-run session state.
        self.cookies = {}
        self.version = None
        self.tracker = None
        self.retrieved = {}  # retrieved issues on this run

        # Credentials are optional; without them mail addresses are skipped.
        try:
            self.backend_user = Config.backend_user
            self.backend_password = Config.backend_password
        except AttributeError:
            printout("No Bugzilla account provided, mail addresses won't " +
                     "be retrieved")
            self.backend_user = None
            self.backend_password = None

        self.bugsdb = get_database(DBBugzillaBackend())
示例#25
0
    def _process_issues(self):
        """
        Retrieve issues either for a single-issue URL or the whole tracker.

        For tracker URLs the method loops in rounds (capped at
        `max_rounds`), repeatedly asking for issue ids newer than the
        last stored date until no new ids come back.
        """
        if self._is_issue_url(self.url):
            # FIXME: this only works for one issue, if more id parameters
            # are set, those issues will not be processed
            ids = [self.url.split("show_bug.cgi?id=")[1]]
            printdbg("Issue #%s URL found" % ids[0])
            url = self._get_domain(self.url)
            self._retrieve_issues(ids, url, self.tracker.id)
        else:
            i = 0
            max_rounds = 50  # 50*10000
            url = self._get_domain(self.url)
            last_date, next_date = self._get_last_and_next_dates()

            # Some bugzillas limit the number of results that a query can return.
            # Due to this, bicho will search for new issues/changes until there
            # are no more new ones.
            ids = self._retrieve_issues_ids(self.url, self.version, next_date)

            while (ids):
                if (i >= max_rounds):
                    break
                printout("Round #%d - Total issues to retrieve: %d" %
                         (i, len(ids)))
                self._retrieve_issues(ids, url, self.tracker.id)
                i += 1
                # Search new ids, but first, we have to check whether they are
                # already stored or not
                last_date, next_date = self._get_last_and_next_dates()
                ids = self._retrieve_issues_ids(self.url, self.version,
                                                last_date)
                # If there aren't new issues from the same date, ask for a new one
                if not ids:
                    printdbg("No issues found for date %s. Trying with %s" %
                             (last_date, next_date))
                    ids = self._retrieve_issues_ids(self.url, self.version,
                                                    next_date)

            if i > 0:
                printout("No more issues to retrieve")
示例#26
0
    def _process_issues(self):
        """
        Retrieve issues for a single-issue URL or iterate the whole tracker.

        Tracker URLs are processed in up to `max_rounds` rounds; each
        round fetches the ids modified since the last stored date, falling
        back to the next date when a round yields nothing new.
        """
        if self._is_issue_url(self.url):
            # FIXME: this only works for one issue, if more id parameters
            # are set, those issues will not be processed
            ids = [self.url.split("show_bug.cgi?id=")[1]]
            printdbg("Issue #%s URL found" % ids[0])
            url = self._get_domain(self.url)
            self._retrieve_issues(ids, url, self.tracker.id)
        else:
            i = 0
            max_rounds = 50  # 50*10000
            url = self._get_domain(self.url)
            last_date, next_date = self._get_last_and_next_dates()

            # Some bugzillas limit the number of results that a query can return.
            # Due to this, bicho will search for new issues/changes until there
            # are no more new ones.
            ids = self._retrieve_issues_ids(self.url, self.version, next_date)

            while(ids):
                if (i >= max_rounds):
                    break
                printout("Round #%d - Total issues to retrieve: %d" % (i, len(ids)))
                self._retrieve_issues(ids, url, self.tracker.id)
                i += 1
                # Search new ids, but first, we have to check whether they are
                # already stored or not
                last_date, next_date = self._get_last_and_next_dates()
                ids = self._retrieve_issues_ids(self.url, self.version, last_date)
                # If there aren't new issues from the same date, ask for a new one
                if not ids:
                    printdbg("No issues found for date %s. Trying with %s" % (last_date, next_date))
                    ids = self._retrieve_issues_ids(self.url, self.version, next_date)

            if i > 0:
                printout("No more issues to retrieve")
示例#27
0
    def run(self):
        """Top-level driver: login, detect version, set tracker, process issues."""
        printout("Running Bicho with delay of %s seconds" % str(self.delay))

        self._login()
        self._set_version()
        self._set_tracker()

        self._process_issues()

        # Report the outcome of the run.
        if self.retrieved:
            printout("Done. %d issues retrieved" % len(self.retrieved))
        else:
            printout("No issues found. Did you provide the correct url?")
示例#28
0
    def run(self):
        """Drive a full run: authenticate, configure, then retrieve issues."""
        banner = "Running Bicho with delay of %s seconds" % str(self.delay)
        printout(banner)

        # Session and tracker setup must precede issue processing.
        self._login()
        self._set_version()
        self._set_tracker()

        self._process_issues()

        if not self.retrieved:
            printout("No issues found. Did you provide the correct url?")
        else:
            printout("Done. %d issues retrieved" % len(self.retrieved))
示例#29
0
    def fetch_and_store(self):
        """
        Fetch all review requests for the group and store them.

        Pages through the Review Board API `max_issues` requests at a
        time, starting from the last stored modification date, inserting
        each review request as an issue.
        """
        printdbg("Fetching reviews from")

        total_rqs = 0
        nrqs = 0
        offset = 0

        # Insert tracker information
        dbtrk = self.insert_tracker(self.base_url, self.group)

        last_mod_date = self.db.get_last_modification_date(tracker_id=dbtrk.id)

        if last_mod_date:
            printdbg("Last modification date stored: %s" % last_mod_date)

        # Single fetch point: the original duplicated the announce+fetch
        # code before and inside the loop; this loop keeps one copy with
        # identical call ordering.
        while True:
            printout("Fetching reviews requests from %s to %s" %
                     (offset, offset + self.max_issues))

            result = self.api_client.review_requests(offset=offset,
                                                     limit=self.max_issues,
                                                     group=self.group,
                                                     last_date=last_mod_date)
            raw_rqs = result['review_requests']

            if not raw_rqs:
                break

            total_rqs += len(raw_rqs)

            for raw_rq in raw_rqs:
                rq = self.get_review_request(raw_rq)

                # Insert review request
                self.db.insert_issue(rq, dbtrk.id)
                nrqs += 1

            time.sleep(self.delay)

            offset += self.max_issues

        printout("Done. %s review requests analyzed from %s" %
                 (nrqs, total_rqs))
示例#30
0
    def fetch_and_store(self):
        """
        Fetch all review requests for the group and store them.

        Iterates the Review Board API in pages of `max_issues`, resuming
        from the last stored modification date, and inserts each review
        request as an issue.
        """
        printdbg("Fetching reviews from")

        total_rqs = 0
        nrqs = 0
        offset = 0

        # Insert tracker information
        dbtrk = self.insert_tracker(self.base_url, self.group)

        last_mod_date = self.db.get_last_modification_date(tracker_id=dbtrk.id)

        if last_mod_date:
            printdbg("Last modification date stored: %s" % last_mod_date)

        # De-duplicated paging loop: the announce+fetch sequence used to
        # appear twice (before the loop and at its tail); one copy here
        # preserves the same call ordering.
        while True:
            printout("Fetching reviews requests from %s to %s" % (offset, offset + self.max_issues))

            result = self.api_client.review_requests(offset=offset,
                                                     limit=self.max_issues,
                                                     group=self.group,
                                                     last_date=last_mod_date)
            raw_rqs = result['review_requests']

            if not raw_rqs:
                break

            total_rqs += len(raw_rqs)

            for raw_rq in raw_rqs:
                rq = self.get_review_request(raw_rq)

                # Insert review request
                self.db.insert_issue(rq, dbtrk.id)
                nrqs += 1

            time.sleep(self.delay)

            offset += self.max_issues

        printout("Done. %s review requests analyzed from %s" % (nrqs, total_rqs))
示例#31
0
    def run(self):
        """
        Fetch and store the issues of a SourceForge tracker.

        When the configured URL names a single issue (has an 'aid='
        parameter) only that issue is fetched; otherwise the tracker's
        whole issue list is retrieved.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        ids = []
        self.parser = SourceForgeParser()

        # first we take the bugs ids
        # BUG FIX: the original tested the unbound name 'url' here
        # (NameError at runtime); the tracker URL lives in self.url.
        if self.url.find("aid=") > 0:
            aux = self.url.split("aid=")[1].split("&")[0]
            ids.append(aux)
        else:
            ids = self.__get_issues_list(self.url)

        self.__check_tracker_url(self.url)

        # order the parameters in the url to add the same tracker url
        # to data base without aid parameter
        self.__order_query(self.url)

        self.db = get_database(DBSourceForgeBackend())
        self.db.insert_supported_traker(SUPPORTED_SF_TRACKERS[0],
                                        SUPPORTED_SF_TRACKERS[1])
        self.__insert_tracker(self.url)

        nbugs = len(ids)
        if nbugs == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        for id in ids:
            url = self.url + '&func=detail&aid=%s' % id  # FIXME:urls!!!
            printdbg(url)
            issue = self.__get_issue(url)
            self.__insert_issue(issue)

            time.sleep(self.delay)

        printout("Done. %s bugs analyzed" % (nbugs))
示例#32
0
文件: sf.py 项目: sferdi/lucenebug
    def run(self):
        """
        Fetch and store the issues of a SourceForge tracker.

        A URL carrying an 'aid=' parameter selects a single issue;
        otherwise every issue id listed by the tracker is processed.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        ids = []
        self.parser = SourceForgeParser()

        # first we take the bugs ids
        # BUG FIX: 'url' was read before any assignment (NameError);
        # the configured tracker URL is self.url.
        if self.url.find("aid=") > 0:
            aux = self.url.split("aid=")[1].split("&")[0]
            ids.append(aux)
        else:
            ids = self.__get_issues_list(self.url)

        self.__check_tracker_url(self.url)

        # order the parameters in the url to add the same tracker url
        # to data base without aid parameter
        self.__order_query(self.url)

        self.db = get_database(DBSourceForgeBackend())
        self.db.insert_supported_traker(SUPPORTED_SF_TRACKERS[0],
                                        SUPPORTED_SF_TRACKERS[1])
        self.__insert_tracker(self.url)

        nbugs = len(ids)
        if nbugs == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        for id in ids:
            url = self.url + '&func=detail&aid=%s' % id  # FIXME:urls!!!
            printdbg(url)
            issue = self.__get_issue(url)
            self.__insert_issue(issue)

            time.sleep(self.delay)

        printout("Done. %s bugs analyzed" % (nbugs))
示例#33
0
    def run(self):
        """
        Fetch bugs from a Launchpad project (or meta-project) and store them.

        Logs into Launchpad with cached credentials, expands a meta
        project into its sub-projects, and for each project searches the
        tasks modified since the last stored date.
        """
        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        bugsdb = get_database(DBLaunchpadBackend())

        printdbg(url)

        # launchpad needs a temp directory to store cached data
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        cachedir = os.path.join(homedir, ".cache/bicho/")
        if not os.path.exists(cachedir):
            os.makedirs(cachedir)
        cre_file = os.path.join(cachedir + 'launchpad-credential')
        self.lp = Launchpad.login_with('Bicho', 'production',
                                       credentials_file=cre_file)

        # All task states we want searchTasks() to return.
        aux_status = ["New", "Incomplete", "Opinion", "Invalid", "Won't Fix",
                      "Expired", "Confirmed", "Triaged", "In Progress",
                      "Fix Committed", "Fix Released",
                      "Incomplete (with response)",
                      "Incomplete (without response)"]

        # Check whether the project is a meta project
        lp_project = self.lp.projects[pname]

        if hasattr(lp_project, 'projects'):
            projects = [p for p in lp_project.projects]
        else:
            projects = [lp_project]

        printdbg("%s projects to analyze" % len(projects))

        # Still useless - insert meta project
        bugsdb.insert_supported_traker("launchpad", "x.x")
        trk = Tracker(url, "launchpad", "x.x")
        dbtrk = bugsdb.insert_tracker(trk)

        for p in projects:
            # Each sub-project gets its own tracker row, keyed on the
            # bugs.* variant of its web link.
            trk_url = p.web_link.replace('://', '://bugs.')
            trk = Tracker(trk_url, "launchpad", "x.x")
            dbtrk = bugsdb.insert_tracker(trk)

            last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

            # Incremental run when a previous date is stored.
            if last_mod_date:
                bugs = p.searchTasks(status=aux_status,
                                     omit_duplicates=False,
                                     order_by='date_last_updated',
                                     modified_since=last_mod_date)
            else:
                bugs = p.searchTasks(status=aux_status,
                                     omit_duplicates=False,
                                     order_by='date_last_updated')

            printdbg("Last bug already cached: %s" % last_mod_date)

            nbugs = len(bugs)

            if nbugs == 0:
                printout("No bugs found on %s" % p.name)
                continue
            else:
                printout("%s bugs found on %s" % (nbugs, p.name))

            self.analyze_project_bugs(bugs, dbtrk, bugsdb)
示例#34
0
    def fetch_and_store_tasks(self):
        """Fetch tasks through the Conduit API and store them as issues.

        Tasks are retrieved in batches of ``self.max_issues``.  When
        ``self.from_id`` is set, paging is done by task id; otherwise by
        offset.  The loop stops as soon as a task older than the last
        cached modification date is seen, or when the API returns no
        more tasks.
        """
        printdbg("Fetching tasks")

        stored = 0
        reached_cache = False

        # Insert tracker information
        dbtrk = self.insert_tracker(self.url)

        last_mod_date = self.db.get_last_modification_date(tracker_id=dbtrk.id)

        as_id = bool(self.from_id)

        if as_id:
            offset = self.from_id
            printout("Fetching tasks from %s id to %s id" %
                     (offset, offset + self.max_issues - 1))
        else:
            offset = 0

            if last_mod_date:
                printdbg("Last modification date stored: %s" % last_mod_date)
            if self.start_from:
                printdbg("Ignoring tasks after %s" % str(self.start_from))

            printout("Fetching tasks from %s to %s" %
                     (offset, offset + self.max_issues))

        ph_tasks = self.conduit.tasks(offset=offset,
                                      limit=self.max_issues,
                                      as_id=as_id)

        while ph_tasks:
            for task in ph_tasks:
                updated_on = unix_to_datetime(task['dateModified'])

                # Anything at or before the cached date is already stored.
                if self.up_to_date(last_mod_date, updated_on):
                    reached_cache = True
                    break

                # Tasks newer than the requested starting point are skipped.
                if self.start_from and updated_on > self.start_from:
                    printdbg("Skipping task %s - %s" %
                             (task['objectName'], str(updated_on)))
                    continue

                # Insert issue
                self.db.insert_issue(self.get_issue_from_task(task), dbtrk.id)
                stored += 1

            if reached_cache:
                printout("Up to date")
                break

            offset += self.max_issues

            if as_id:
                printout("Fetching tasks from %s id to %s id" %
                         (offset, offset + self.max_issues - 1))
            else:
                printout("Fetching tasks from %s to %s" %
                         (offset, offset + self.max_issues))

            ph_tasks = self.conduit.tasks(offset=offset,
                                          limit=self.max_issues,
                                          as_id=as_id)

            if not ph_tasks:
                printdbg("No more tasks fetched")
                printout("Up to date")

        printout("Done. %s bugs analyzed" % (stored))
示例#35
0
    def run(self):

        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        bugsdb = get_database(DBLaunchpadBackend())

        printdbg(url)

        # launchpad needs a temp directory to store cached data
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        cachedir = os.path.join(homedir, ".cache/bicho/")
        if not os.path.exists(cachedir):
            os.makedirs(cachedir)
        cre_file = os.path.join(cachedir + 'launchpad-credential')
        self.lp = Launchpad.login_with('Bicho',
                                       'production',
                                       credentials_file=cre_file)

        aux_status = [
            "New", "Incomplete", "Opinion", "Invalid", "Won't Fix", "Expired",
            "Confirmed", "Triaged", "In Progress", "Fix Committed",
            "Fix Released", "Incomplete (with response)",
            "Incomplete (without response)"
        ]

        # still useless
        bugsdb.insert_supported_traker("launchpad", "x.x")
        trk = Tracker(url, "launchpad", "x.x")
        dbtrk = bugsdb.insert_tracker(trk)

        last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

        if last_mod_date:
            bugs = self.lp.projects[pname].searchTasks(
                status=aux_status,
                omit_duplicates=False,
                order_by='date_last_updated',
                modified_since=last_mod_date)
        else:
            bugs = self.lp.projects[pname].searchTasks(
                status=aux_status,
                omit_duplicates=False,
                order_by='date_last_updated')
        printdbg("Last bug already cached: %s" % last_mod_date)

        nbugs = len(bugs)

        if nbugs == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        analyzed = []

        for bug in bugs:

            if bug.web_link in analyzed:
                continue  # for the bizarre error #338

            try:
                issue_data = self.analyze_bug(bug)
            except Exception:
                #FIXME it does not handle the e
                printerr("Error in function analyzeBug with URL: ' \
                '%s and Bug: %s" % (url, bug))
                raise

            try:
                # we can have meta-trackers but we want to have the original
                #tracker name
                tr_url = self.__get_tracker_url_from_bug(bug)
                if (tr_url != url):
                    aux_trk = Tracker(tr_url, "launchpad", "x.x")
                    dbtrk = bugsdb.insert_tracker(aux_trk)
                bugsdb.insert_issue(issue_data, dbtrk.id)
            except UnicodeEncodeError:
                printerr(
                    "UnicodeEncodeError: the issue %s couldn't be stored" %
                    (issue_data.issue))
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored" %
                         (issue_data.issue))
            except Exception, e:
                printerr("Unexpected Error: the issue %s couldn't be stored" %
                         (issue_data.issue))
                print e

            analyzed.append(bug.web_link)  # for the bizarre error #338
            time.sleep(self.delay)
示例#36
0
            ticketList = json.loads(f.read())

            bugs = []
            for ticket in ticketList["tickets"]:
                bugs.append(ticket["ticket_num"])

            for bug in bugs:
                try:
                    issue_url = Config.url + "/" + str(bug)
                    issue_data = self.analyze_bug(issue_url)
                    if issue_data is None:
                        continue
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (
                        remaining) * Config.delay / 60, "m"
                    time.sleep(self.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug " + issue_url)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored" %
                        (issue_data.issue))
            start_page += 1

        printout("Done. Bugs analyzed:" + str(total_issues - remaining))


Backend.register_backend('allura', Allura)
示例#37
0
    def run(self):
        """
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        issues_per_query = 250
        start_issue = 1

        bugs = []
        bugsdb = get_database(DBGoogleCodeBackend())

        # still useless
        bugsdb.insert_supported_traker("googlecode", "beta")
        trk = Tracker(Config.url, "googlecode", "beta")

        dbtrk = bugsdb.insert_tracker(trk)

        self.url = Config.url

        #  https://code.google.com/feeds/issues/p/mobile-time-care
        self.url_issues = Config.url + "/issues/full?max-results=1"
        printdbg("URL for getting metadata " + self.url_issues)

        d = feedparser.parse(self.url_issues)

        total_issues = int(d['feed']['opensearch_totalresults'])
        print "Total bugs: ", total_issues
        if total_issues == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)
        remaining = total_issues

        print "ETA ", (total_issues * Config.delay) / (60), "m (", (
            total_issues * Config.delay) / (60 * 60), "h)"

        while start_issue < total_issues:
            self.url_issues = Config.url + "/issues/full?max-results=" + str(
                issues_per_query)
            self.url_issues += "&start-index=" + str(start_issue)

            printdbg("URL for next issues " + self.url_issues)

            d = feedparser.parse(self.url_issues)

            for entry in d['entries']:
                try:
                    issue = self.analyze_bug(entry)
                    if issue is None:
                        continue
                    bugsdb.insert_issue(issue, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (
                        remaining) * Config.delay / 60, "m", " issues ", str(
                            remaining)
                    time.sleep(Config.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug ")
                    pprint.pprint(entry)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored" %
                        (issue.issue))
示例#38
0
    def run(self):
        """Fetch and store every issue of a GitHub repository (API v3).

        Walks the issue list batch by batch, resuming after the newest
        issue already cached for this tracker, and inserts each analyzed
        issue into the database.  Exits cleanly whenever the GitHub rate
        limit is hit.

        NOTE(review): this snippet ends inside the paging loop; the loop
        exit condition relies on ``__get_batch_bugs`` eventually
        returning an empty list.
        """
        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        bugsdb = get_database(DBGithubBackend())

        url = self.url
        # NOTE(review): pname is assigned but never used in the visible code.
        pname = None
        pname = self.__get_project_from_url()

        printdbg(url)

        bugsdb.insert_supported_traker("github", "v3")
        trk = Tracker(url, "github", "v3")
        dbtrk = bugsdb.insert_tracker(trk)

        self.bugs_state = ALL_STATES
        self.pagecont = 1
        self.mod_date = None

        aux_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

        if aux_date:
            # Resume incrementally from the newest cached issue.
            self.mod_date = aux_date.isoformat()
            printdbg("Last issue already cached: %s" % self.mod_date)

        try:
            bugs = self.__get_batch_bugs()
        except GitHubRateLimitReached:
            printout("GitHub rate limit reached. To resume, wait some minutes.")
            sys.exit(0)

        nbugs = len(bugs)

        if len(bugs) == 0:
            if aux_date:
                printout("Bicho database up to date")
            else:
                printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        # NOTE(review): auxcont is unused in the visible code; it may be
        # referenced past the end of this snippet -- confirm before removing.
        auxcont = 0
        while len(bugs) > 0:

            for bug in bugs:
                try:
                    issue_data = self.analyze_bug(bug)
                except GitHubRateLimitReached:
                    printout("GitHub rate limit reached. To resume, wait some minutes.")
                    sys.exit(0)
                except Exception:
                    #FIXME it does not handle the e
                    msg = "Error in function analyzeBug with URL: %s and bug: %s" % (url, bug)
                    printerr(msg)
                    raise

                try:
                    # we can have meta-trackers but we want to have the
                    # original tracker name
                    tr_url = self.__get_tracker_url_from_bug(bug)
                    if (tr_url != url):
                        aux_trk = Tracker(tr_url, "github", "v3")
                        dbtrk = bugsdb.insert_tracker(aux_trk)
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored"
                        % (issue_data.issue))
                except Exception, e:
                    printerr("ERROR: ")
                    print e

                printdbg ("Getting ticket number " + str(bug["number"]))
                time.sleep(self.delay)

            self.pagecont += 1

            # Next batch; stop paging when the rate limit is hit.
            try:
                bugs = self.__get_batch_bugs()
            except GitHubRateLimitReached:
                printout("GitHub rate limit reached. To resume, wait some minutes.")
                sys.exit(0)

            nbugs = nbugs + len(bugs)
示例#39
0
        while start_issue < total_issues:
            self.url_issues = Config.url + "/issues/full?max-results=" + str(issues_per_query)
            self.url_issues += "&start-index=" + str(start_issue)

            printdbg("URL for next issues " + self.url_issues)

            d = feedparser.parse(self.url_issues)

            for entry in d['entries']:
                try:
                    issue = self.analyze_bug(entry)
                    if issue is None:
                        continue
                    bugsdb.insert_issue(issue, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (remaining) * Config.delay / 60, "m", " issues ", str(remaining)
                    time.sleep(Config.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug ")
                    pprint.pprint(entry)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                             % (issue.issue))

            start_issue += issues_per_query

        printout("Done. %s bugs analyzed" % (total_issues - remaining))

Backend.register_backend('googlecode', GoogleCode)
示例#40
0
    def fetch_and_store_tasks(self):
        """Fetch tasks through the Conduit API and store them as issues.

        Tasks are retrieved in batches of ``self.max_issues``, paged by
        task id when ``self.from_id`` is set and by offset otherwise.
        Stops when a task at or before the last cached modification date
        is reached, or when the API returns no more tasks.
        """
        printdbg("Fetching tasks")

        nbugs = 0
        count = 0
        stop = False

        # Insert tracker information
        dbtrk = self.insert_tracker(self.url)

        last_mod_date = self.db.get_last_modification_date(tracker_id=dbtrk.id)

        if self.from_id:
            count = self.from_id
            as_id = True

            printout("Fetching tasks from %s id to %s id" % (count, count + self.max_issues - 1))
        else:
            as_id = False

            if last_mod_date:
                printdbg("Last modification date stored: %s" % last_mod_date)
            if self.start_from:
                printdbg("Ignoring tasks after %s" % str(self.start_from))

            printout("Fetching tasks from %s to %s" % (count, count + self.max_issues))

        ph_tasks = self.conduit.tasks(offset=count,
                                      limit=self.max_issues,
                                      as_id=as_id)

        while ph_tasks:
            for pht in ph_tasks:
                updated_on = unix_to_datetime(pht['dateModified'])

                # Anything at or before the cached date is already stored.
                if self.up_to_date(last_mod_date, updated_on):
                    stop = True
                    break

                # Tasks newer than the requested starting point are skipped.
                if self.start_from and updated_on > self.start_from:
                    printdbg("Skipping task %s - %s" % \
                             (pht['objectName'] ,str(updated_on)))
                    continue

                issue = self.get_issue_from_task(pht)

                # Insert issue
                self.db.insert_issue(issue, dbtrk.id)

                nbugs += 1

            if stop:
                printout("Up to date")
                break

            count = count + self.max_issues

            if as_id:
                printout("Fetching tasks from %s id to %s id" % (count, count + self.max_issues - 1))
            else:
                printout("Fetching tasks from %s to %s" % (count, count + self.max_issues))

            ph_tasks = self.conduit.tasks(offset=count,
                                          limit=self.max_issues,
                                          as_id=as_id)

            if not ph_tasks:
                printdbg("No more tasks fetched")
                printout("Up to date")

        printout("Done. %s bugs analyzed" % (nbugs))
示例#41
0
            f = urllib.urlopen(self.url_issues)

            ticketList = json.loads(f.read())

            bugs = []
            for ticket in ticketList["tickets"]:
                bugs.append(ticket["ticket_num"])

            for bug in bugs:
                try:
                    issue_url = Config.url + "/" + str(bug)
                    issue_data = self.analyze_bug(issue_url)
                    if issue_data is None:
                        continue
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (remaining) * Config.delay / 60, "m"
                    time.sleep(self.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug " + issue_url)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                             % (issue_data.issue))
            start_page += 1

        printout("Done. Bugs analyzed:" + str(total_issues - remaining))

Backend.register_backend('allura', Allura)
示例#42
0
    def run(self):
        """
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        # limit=-1 is NOT recognized as 'all'.  500 is a reasonable limit. - allura code
        issues_per_query = 500
        start_page = 0

        bugs = []
        bugsdb = get_database(DBAlluraBackend())

        # still useless in allura
        bugsdb.insert_supported_traker("allura", "beta")
        trk = Tracker(Config.url, "allura", "beta")
        dbtrk = bugsdb.insert_tracker(trk)

        last_mod_date = bugsdb.get_last_modification_date()

        # Date before the first ticket
        time_window_start = "1900-01-01T00:00:00Z"
        time_window_end = datetime.now().isoformat() + "Z"

        if last_mod_date:
            time_window_start = last_mod_date
            printdbg("Last bugs analyzed were modified on: %s" % last_mod_date)

        time_window = time_window_start + " TO  " + time_window_end

        self.url_issues = Config.url + "/search/?limit=1"
        self.url_issues += "&q="
        # A time range with all the tickets
        self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]")
        printdbg("URL for getting metadata " + self.url_issues)

        f = urllib.urlopen(self.url_issues)
        ticketTotal = json.loads(f.read())

        total_issues = int(ticketTotal['count'])
        total_pages = total_issues / issues_per_query
        print("Number of tickets: " + str(total_issues))

        if total_issues == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)
        remaining = total_issues

        print "ETA ", (total_issues * Config.delay) / (60), "m (", (total_issues * Config.delay) / (60 * 60), "h)"

        while start_page <= total_pages:
            self.url_issues = Config.url + "/search/?limit=" + str(issues_per_query)
            self.url_issues += "&page=" + str(start_page) + "&q="
            # A time range with all the tickets
            self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]")
            # Order by mod_date_dt desc
            self.url_issues += "&sort=mod_date_dt+asc"

            printdbg("URL for next issues " + self.url_issues)

            f = urllib.urlopen(self.url_issues)

            ticketList = json.loads(f.read())

            bugs = []
            for ticket in ticketList["tickets"]:
                bugs.append(ticket["ticket_num"])

            for bug in bugs:
                try:
                    issue_url = Config.url + "/" + str(bug)
                    issue_data = self.analyze_bug(issue_url)
                    if issue_data is None:
                        continue
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (remaining) * Config.delay / 60, "m"
                    time.sleep(self.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug " + issue_url)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                             % (issue_data.issue))
示例#43
0
            printdbg("URL for next issues " + self.url_issues)

            d = feedparser.parse(self.url_issues)

            for entry in d['entries']:
                try:
                    issue = self.analyze_bug(entry)
                    if issue is None:
                        continue
                    bugsdb.insert_issue(issue, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (
                        remaining) * Config.delay / 60, "m", " issues ", str(
                            remaining)
                    time.sleep(Config.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug ")
                    pprint.pprint(entry)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored" %
                        (issue.issue))

            start_issue += issues_per_query

        printout("Done. %s bugs analyzed" % (total_issues - remaining))


Backend.register_backend('googlecode', GoogleCode)
示例#44
0
                parser.parse(serverUrl + query + bug_key + "/" + bug_key +
                             ".xml")
                issue = handler.getIssues(self.conn)[0]
                bugsdb.insert_issue(issue, dbtrk.id)
            except Exception, e:
                #printerr(e)
                print(e)

        else:
            self.last_mod_date = bugsdb.get_last_modification_date(
                tracker_id=dbtrk.id)
            if self.last_mod_date:
                # self.url = self.url + "&updated:after=" + last_mod_date
                printdbg("Last bugs cached were modified at: %s" %
                         self.last_mod_date)

            bugs_number = self.bugsNumber(self.url)
            print "Tickets to be retrieved:", str(bugs_number)
            remaining = bugs_number
            while (remaining > 0):
                self.analyze_bug_list(self.max_issues, bugs_number - remaining,
                                      bugsdb, dbtrk.id)
                remaining -= self.max_issues
                #print "Remaining time: ", (remaining/issues_per_xml_query)*Config.delay/60, "m", "(",remaining,")"
                time.sleep(self.delay)

            printout("Done. %s bugs analyzed" % (bugs_number))


Backend.register_backend("jira", JiraBackend)
示例#45
0
    def run(self):
        """Fetch and store every issue of a GitHub repository (API v3).

        Walks the issue list batch by batch, resuming after the newest
        issue already cached for this tracker, and inserts each analyzed
        issue into the database.  Exits cleanly whenever the GitHub rate
        limit is hit.

        NOTE(review): this snippet ends inside the paging loop; the loop
        exit relies on ``__get_batch_bugs`` eventually returning an
        empty list.
        """
        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        bugsdb = get_database(DBGithubBackend())

        url = self.url
        # NOTE(review): pname is assigned but never used in the visible code.
        pname = None
        pname = self.__get_project_from_url()

        printdbg(url)

        bugsdb.insert_supported_traker("github", "v3")
        trk = Tracker(url, "github", "v3")
        dbtrk = bugsdb.insert_tracker(trk)

        self.bugs_state = ALL_STATES
        self.pagecont = 1
        self.mod_date = None

        aux_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

        if aux_date:
            # Resume incrementally from the newest cached issue.
            self.mod_date = aux_date.isoformat()
            printdbg("Last issue already cached: %s" % self.mod_date)

        try:
            bugs = self.__get_batch_bugs()
        except GitHubRateLimitReached:
            printout(
                "GitHub rate limit reached. To resume, wait some minutes.")
            sys.exit(0)

        nbugs = len(bugs)

        if len(bugs) == 0:
            if aux_date:
                printout("Bicho database up to date")
            else:
                printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        # NOTE(review): auxcont is unused in the visible code; it may be
        # referenced past the end of this snippet -- confirm before removing.
        auxcont = 0
        while len(bugs) > 0:

            for bug in bugs:
                try:
                    issue_data = self.analyze_bug(bug)
                except GitHubRateLimitReached:
                    printout(
                        "GitHub rate limit reached. To resume, wait some minutes."
                    )
                    sys.exit(0)
                except Exception:
                    #FIXME it does not handle the e
                    msg = "Error in function analyzeBug with URL: %s and bug: %s" % (
                        url, bug)
                    printerr(msg)
                    raise

                try:
                    # we can have meta-trackers but we want to have the
                    # original tracker name
                    tr_url = self.__get_tracker_url_from_bug(bug)
                    if (tr_url != url):
                        aux_trk = Tracker(tr_url, "github", "v3")
                        dbtrk = bugsdb.insert_tracker(aux_trk)
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored" %
                        (issue_data.issue))
                except Exception, e:
                    printerr("ERROR: ")
                    print e

                printdbg("Getting ticket number " + str(bug["number"]))
                time.sleep(self.delay)

            self.pagecont += 1

            # Next batch; stop paging when the rate limit is hit.
            try:
                bugs = self.__get_batch_bugs()
            except GitHubRateLimitReached:
                printout(
                    "GitHub rate limit reached. To resume, wait some minutes.")
                sys.exit(0)

            nbugs = nbugs + len(bugs)
示例#46
0
文件: lp.py 项目: athanrous/Bicho
    def run(self):

        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        bugsdb = get_database(DBLaunchpadBackend())

        printdbg(url)

        # launchpad needs a temp directory to store cached data
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        cachedir = os.path.join(homedir, ".cache/bicho/")
        if not os.path.exists(cachedir):
            os.makedirs(cachedir)
        cre_file = os.path.join(cachedir + 'launchpad-credential')
        self.lp = Launchpad.login_with('Bicho', 'production',
                                       credentials_file=cre_file)

        aux_status = ["New", "Incomplete", "Opinion", "Invalid", "Won't Fix",
                      "Expired", "Confirmed", "Triaged", "In Progress",
                      "Fix Committed", "Fix Released",
                      "Incomplete (with response)",
                      "Incomplete (without response)"]

        last_mod_date = bugsdb.get_last_modification_date()

        if last_mod_date:
            bugs = self.lp.projects[pname].searchTasks(status=aux_status,
                                                       omit_duplicates=False,
                                                       order_by='date_last_updated',
                                                       modified_since=last_mod_date)
        else:
            bugs = self.lp.projects[pname].searchTasks(status=aux_status,
                                                       omit_duplicates=False,
                                                       order_by='date_last_updated')
        printdbg("Last bug already cached: %s" % last_mod_date)

        nbugs = len(bugs)

        # still useless
        bugsdb.insert_supported_traker("launchpad", "x.x")
        trk = Tracker(url, "launchpad", "x.x")
        dbtrk = bugsdb.insert_tracker(trk)
        #

        if nbugs == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        analyzed = []

        for bug in bugs:

            if bug.web_link in analyzed:
                continue  # for the bizarre error #338

            try:
                issue_data = self.analyze_bug(bug)
            except Exception:
                #FIXME it does not handle the e
                printerr("Error in function analyzeBug with URL: ' \
                '%s and Bug: %s" % (url, bug))
                raise

            try:
                # we can have meta-trackers but we want to have the original
                #tracker name
                tr_url = self.__get_tracker_url_from_bug(bug)
                if (tr_url != url):
                    aux_trk = Tracker(tr_url, "launchpad", "x.x")
                    dbtrk = bugsdb.insert_tracker(aux_trk)
                bugsdb.insert_issue(issue_data, dbtrk.id)
            except UnicodeEncodeError:
                printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except Exception, e:
                printerr("Unexpected Error: the issue %s couldn't be stored"
                         % (issue_data.issue))
                print e

            analyzed.append(bug.web_link)  # for the bizarre error #338
            time.sleep(self.delay)
示例#47
0
                         % (issue_data.issue))
            except Exception, e:
                printerr("Unexpected Error: the issue %s couldn't be stored"
                         % (issue_data.issue))
                print e

            analyzed.append(bug.web_link)  # for the bizarre error #338

        try:
            # we read the temporary table with the relationships and create
            # the final one
            bugsdb.store_final_relationships()
        except Exception, e:
            raise e

        printout("Done. %s/%s bugs analyzed" % (nbugs, len(bugs)))

    def run(self):
        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        bugsdb = get_database(DBLaunchpadBackend())

        printdbg(url)

        # launchpad needs a temp directory to store cached data
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        cachedir = os.path.join(homedir, ".cache/bicho/")
示例#48
0
class GithubBackend(Backend):
    """Bicho backend for the GitHub issues API (v3).

    Downloads open and closed issues page by page (100 per request),
    together with each issue's comments and event activity, and stores
    them through the Bicho database layer.  HTTP Basic authentication
    is mandatory because unauthenticated GitHub requests are heavily
    rate-limited.
    """

    def __init__(self):
        # Base API URL of the tracker, presumably of the form
        # https://api.github.com/repos/<owner>/<project>/issues
        # (see __get_project_from_url) -- TODO confirm against callers.
        self.url = Config.url
        # Seconds to sleep after each processed bug (API politeness).
        self.delay = Config.delay
        try:
            self.backend_password = Config.backend_password
            self.backend_user = Config.backend_user
        except AttributeError:
            # Credentials are required for every request; abort early.
            printerr("\n--backend-user and --backend-password are mandatory \
            to download bugs from Github\n")
            sys.exit(1)
        # Last seen value of the 'x-ratelimit-remaining' response header.
        self.remaining_ratelimit = 0

    def get_domain(self, url):
        """Return the scheme and host of *url* with a trailing slash,
        e.g. 'https://api.github.com/'."""
        strings = url.split('/')
        return strings[0] + "//" + strings[2] + "/"

    def analyze_bug(self, bug):
        """Build a GithubIssue from *bug*, a decoded JSON issue dict.

        Fetches and attaches the issue's comments and event activity,
        which costs two extra API requests per bug.
        """
        #Retrieving main bug information

        printdbg(bug['url'] + " " + bug['state'] + " updated_at " +
                 bug['updated_at'] + ' (ratelimit = ' +
                 str(self.remaining_ratelimit) + ")")

        issue = bug['id']
        if bug['labels']:
            # Only the first label is kept as the bug type.
            bug_type = bug['labels'][0]['name']  # FIXME
        else:
            bug_type = unicode('')
        summary = bug['title']
        desc = bug['body']
        submitted_by = People(bug['user']['login'])
        ## FIXME send petition to bug['user']['url']

        submitted_on = self.__to_datetime(bug['created_at'])

        if bug['assignee']:
            assignee = People(bug['assignee']['login'])
            ## assignee.set_name(bug.assignee.display_name)
            ## FIXME get name from bug['assignee']['url']
        else:
            assignee = People(unicode("nobody"))

        # 'issue' is rebound here from the raw id to the issue object.
        issue = GithubIssue(issue, bug_type, summary, desc, submitted_by,
                            submitted_on)
        issue.set_assigned(assignee)

        issue.set_status(bug['state'])
        issue.set_description(bug['body'])
        issue.set_web_link(bug['html_url'])

        try:
            if bug['closed_at']:
                issue.set_closed_at(self.__to_datetime(bug['closed_at']))
        except AttributeError:
            pass

        # updated_at offers ISO 8601 format: YYYY-MM-DDTHH:MM:SSZ
        # MySQL doesn't support timezone, we remove it
        issue.set_updated_at(self.__to_datetime(bug['updated_at']))

        if bug['milestone']:
            issue.set_milestone_name(bug['milestone']['id'])
            issue.set_milestone_summary(bug['milestone']['description'])
            issue.set_milestone_title(bug['milestone']['title'])
            issue.set_milestone_web_link(bug['milestone']['url'])

        comments = self.__get_batch_comments(bug['number'])
        for c in comments:
            by = People(c['user']['login'])
            ## by.setname() FIXME - to be done
            date = self.__to_datetime(c['created_at'])
            com = Comment(c['body'], by, date)
            issue.add_comment(com)

        # activity: GitHub issue events are mapped onto Change records.
        entries = self.__get_batch_activities(bug['number'])
        for e in entries:
            field = e['event']
            added = e['commit_id']
            removed = unicode('')
            if e['actor']:
                by = People(e['actor']['login'])
            else:
                by = People(u"nobody")
            ## by.setname() FIXME - to be done
            date = self.__to_datetime(e['created_at'])
            change = Change(field, removed, added, by, date)
            issue.add_change(change)

        return issue

    def __to_datetime(self, str):
        # converts str time to datetime
        # MySQL doesn't support timezone, we remove it
        # NOTE(review): the parameter shadows the builtin 'str'; the
        # [:-1] slice strips the trailing 'Z' of the ISO 8601 value.

        return parse(str[:-1])

    def __get_project_from_url(self):
        """Extract '<owner>/<project>' from self.url by slicing between
        the GitHub API prefix and the trailing '/issues' segment."""

        project_name = None
        url = self.url

        if url[-1] == '/':
            url = url[:-1]

        aux2 = url.rfind('/issues')
        aux1 = len('https://api.github.com/repos/')
        project_name = url[aux1:aux2]

        return project_name

    def __get_tracker_url_from_bug(self, bug):
        # Drop the trailing '/<number>' from the bug's own URL to get
        # the tracker URL it actually belongs to.
        return bug['url'][:bug['url'].rfind('/')]

    def __get_batch_activities(self, bug_number):
        """Fetch and return the decoded event list for *bug_number*.
        Updates self.remaining_ratelimit from the response headers."""
        url = self.url + "/" + str(bug_number) + "/events"
        base64string = base64.encodestring(
            '%s:%s' % (self.backend_user, self.backend_password)).replace(
                '\n', '')

        request = urllib2.Request(url)
        request.add_header("Authorization", "Basic %s" % base64string)

        result = urllib2.urlopen(request)
        content = result.read()

        self.remaining_ratelimit = result.info()['x-ratelimit-remaining']
        events = json.loads(content)

        return events

    def __get_batch_comments(self, bug_number):
        """Fetch and return the decoded comment list for *bug_number*.
        Updates self.remaining_ratelimit from the response headers."""
        url = self.url + "/" + str(bug_number) + "/comments"
        base64string = base64.encodestring(
            '%s:%s' % (self.backend_user, self.backend_password)).replace(
                '\n', '')

        request = urllib2.Request(url)
        request.add_header("Authorization", "Basic %s" % base64string)

        result = urllib2.urlopen(request)
        content = result.read()

        self.remaining_ratelimit = result.info()['x-ratelimit-remaining']
        comments = json.loads(content)

        return comments

    def __get_batch_bugs_state(self, state=OPEN_STATE, since=None):
        """Fetch one page (self.pagecont) of issues in *state*, updated
        after *since*, sorted oldest-first; return the decoded list."""
        if state == OPEN_STATE:
            url = self.url + "?state=open&page=" + str(self.pagecont) \
                + "&per_page=100&sort=updated&direction=asc"
        else:
            url = self.url + "?state=closed&page=" + str(self.pagecont) \
                + "&per_page=100&sort=updated&direction=asc"
            # we need to download both closed and open bugs,
            #by default state = open

        if since:
            url = url + "&since=" + str(since)

        base64string = base64.encodestring(
            '%s:%s' % (self.backend_user, self.backend_password)).replace(
                '\n', '')

        request = urllib2.Request(url)
        request.add_header("Authorization", "Basic %s" % base64string)

        result = urllib2.urlopen(request)
        content = result.read()

        self.remaining_ratelimit = result.info()['x-ratelimit-remaining']
        bugs = json.loads(content)

        return bugs

    def __get_batch_bugs(self):
        """Return the next page of bugs, draining open issues first and
        then switching to closed ones (resetting the page counter)."""
        if self.bugs_state == OPEN_STATE:
            bugs = self.__get_batch_bugs_state(state=OPEN_STATE,
                                               since=self.mod_date_open)
            if len(bugs) == 0:
                # Open issues exhausted: switch to closed, restart paging.
                self.bugs_state = CLOSED_STATE
                self.pagecont = 1
        if self.bugs_state == CLOSED_STATE:
            bugs = self.__get_batch_bugs_state(state=CLOSED_STATE,
                                               since=self.mod_date_closed)
        return bugs

    def run(self):
        """Main entry point: fetch all (new) issues and store them.

        Resumes incrementally from the last modification dates cached
        per state in the database, then loops page by page until an
        empty batch is returned.
        """
        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        bugsdb = get_database(DBGithubBackend())

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        printdbg(url)

        bugsdb.insert_supported_traker("github", "v3")
        trk = Tracker(url, "github", "v3")
        dbtrk = bugsdb.insert_tracker(trk)

        # Paging state consumed by __get_batch_bugs; presumably
        # OPEN_STATE == "open" -- TODO confirm against the constants.
        self.bugs_state = "open"
        self.pagecont = 1

        self.mod_date_open = None
        self.mod_date_closed = None

        # Resume points: newest cached modification date per state.
        aux_date_open = bugsdb.get_last_modification_date(state="open",
                                                          tracker_id=dbtrk.id)
        if aux_date_open:
            self.mod_date_open = aux_date_open.isoformat()
        aux_date_closed = bugsdb.get_last_modification_date(
            state="closed", tracker_id=dbtrk.id)
        if aux_date_closed:
            self.mod_date_closed = aux_date_closed.isoformat()

        printdbg("Last open bug already cached: %s" % self.mod_date_open)
        printdbg("Last closed bug already cached: %s" % self.mod_date_closed)
        bugs = self.__get_batch_bugs()
        nbugs = len(bugs)

        if len(bugs) == 0:
            if aux_date_open or aux_date_closed:
                printout("Bicho database up to date")
            else:
                printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        # NOTE(review): auxcont is never used afterwards.
        auxcont = 0
        while len(bugs) > 0:

            for bug in bugs:

                try:
                    issue_data = self.analyze_bug(bug)
                except Exception:
                    #FIXME it does not handle the e
                    printerr("Error in function analyzeBug with URL: ' \
                    '%s and Bug: %s" % (url, bug))
                    raise

                try:
                    # we can have meta-trackers but we want to have the
                    # original tracker name
                    tr_url = self.__get_tracker_url_from_bug(bug)
                    if (tr_url != url):
                        aux_trk = Tracker(tr_url, "github", "v3")
                        dbtrk = bugsdb.insert_tracker(aux_trk)
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored" %
                        (issue_data.issue))
                except Exception, e:
                    printerr("ERROR: ")
                    print e

                printdbg("Getting ticket number " + str(bug["number"]))
                time.sleep(self.delay)

            self.pagecont += 1
            bugs = self.__get_batch_bugs()
            nbugs = nbugs + len(bugs)

        #end while

        printout("Done. %s bugs analyzed" % (nbugs))
示例#49
0
                    if (tr_url != url):
                        aux_trk = Tracker(tr_url, "github", "v3")
                        dbtrk = bugsdb.insert_tracker(aux_trk)
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored"
                        % (issue_data.issue))
                except Exception, e:
                    printerr("ERROR: ")
                    print e

                printdbg ("Getting ticket number " + str(bug["number"]))
                time.sleep(self.delay)

            self.pagecont += 1

            try:
                bugs = self.__get_batch_bugs()
            except GitHubRateLimitReached:
                printout("GitHub rate limit reached. To resume, wait some minutes.")
                sys.exit(0)

            nbugs = nbugs + len(bugs)

        #end while

        printout("Done. %s bugs analyzed" % (nbugs))

# Make this backend selectable via the 'github' backend name.
Backend.register_backend("github", GithubBackend)
示例#50
0
文件: lp.py 项目: yujuanjiang/Bicho
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored" % (issue_data.issue))
            except Exception, e:
                printerr("Unexpected Error: the issue %s couldn't be stored" % (issue_data.issue))
                print e

            analyzed.append(bug.web_link)  # for the bizarre error #338

        try:
            # we read the temporary table with the relationships and create
            # the final one
            bugsdb.store_final_relationships()
        except Exception, e:
            raise e

        printout("Done. %s/%s bugs analyzed" % (nbugs, len(bugs)))

    def run(self):
        print ("Running Bicho with delay of %s seconds" % (str(self.delay)))

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        bugsdb = get_database(DBLaunchpadBackend())

        printdbg(url)

        # launchpad needs a temp directory to store cached data
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        cachedir = os.path.join(homedir, ".cache/bicho/")
示例#51
0
文件: atljira.py 项目: hjmacho/Bicho
                issue = jira.issue(bug_key,expand='changelog')
                self.analyze_bug_list(issue, self.serverUrl+'/browse/', bugsdb, dbtrk.id)
            except Exception, e:
                #printerr(e)
                print(e)

        else:
            self.last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)
            if self.last_mod_date:
                # self.url = self.url + "&updated:after=" + last_mod_date
                printdbg("Last bugs cached were modified at: %s" % self.last_mod_date)

            bugs_number = self.bugsNumber(jira)
            print "Tickets to be retrieved:", str(bugs_number)
            remaining = bugs_number
            while (remaining > 0):
                startAtIssue = bugs_number-remaining
                jira = JIRA(options_jira)
                issuesAux = jira.search_issues('project=' + self.projectName + ' order by id asc',startAt=startAtIssue,maxResults=issues_per_query,fields=None)
                issues=[]
                for i in issuesAux:
                    issues.append(jira.issue(i.key, expand='changelog'))
                self.analyze_bug_list(issues, self.serverUrl+'/browse/', bugsdb, dbtrk.id)
                remaining -= issues_per_query
                #print "Remaining time: ", (remaining/issues_per_xml_query)*Config.delay/60, "m", "(",remaining,")"
                time.sleep(self.delay)

            printout("Done. %s bugs analyzed" % (bugs_number))

# Make this backend selectable via the 'atljira' backend name.
Backend.register_backend("atljira", JiraBackend)
示例#52
0
    def run(self):
        """
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        # limit=-1 is NOT recognized as 'all'.  500 is a reasonable limit. - allura code
        issues_per_query = 500
        start_page = 0

        bugs = []
        bugsdb = get_database(DBAlluraBackend())

        # still useless in allura
        bugsdb.insert_supported_traker("allura", "beta")
        trk = Tracker(Config.url, "allura", "beta")
        dbtrk = bugsdb.insert_tracker(trk)

        last_mod_date = bugsdb.get_last_modification_date()

        # Date before the first ticket
        time_window_start = "1900-01-01T00:00:00Z"
        time_window_end = datetime.now().isoformat() + "Z"

        if last_mod_date:
            time_window_start = last_mod_date
            printdbg("Last bugs analyzed were modified on: %s" % last_mod_date)

        time_window = time_window_start + " TO  " + time_window_end

        self.url_issues = Config.url + "/search/?limit=1"
        self.url_issues += "&q="
        # A time range with all the tickets
        self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]")
        printdbg("URL for getting metadata " + self.url_issues)

        f = urllib.urlopen(self.url_issues)
        ticketTotal = json.loads(f.read())

        total_issues = int(ticketTotal['count'])
        total_pages = total_issues / issues_per_query
        print("Number of tickets: " + str(total_issues))

        if total_issues == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)
        remaining = total_issues

        print "ETA ", (total_issues * Config.delay) / (60), "m (", (
            total_issues * Config.delay) / (60 * 60), "h)"

        while start_page <= total_pages:
            self.url_issues = Config.url + "/search/?limit=" + str(
                issues_per_query)
            self.url_issues += "&page=" + str(start_page) + "&q="
            # A time range with all the tickets
            self.url_issues += urllib.quote("mod_date_dt:[" + time_window +
                                            "]")
            # Order by mod_date_dt desc
            self.url_issues += "&sort=mod_date_dt+asc"

            printdbg("URL for next issues " + self.url_issues)

            f = urllib.urlopen(self.url_issues)

            ticketList = json.loads(f.read())

            bugs = []
            for ticket in ticketList["tickets"]:
                bugs.append(ticket["ticket_num"])

            for bug in bugs:
                try:
                    issue_url = Config.url + "/" + str(bug)
                    issue_data = self.analyze_bug(issue_url)
                    if issue_data is None:
                        continue
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (
                        remaining) * Config.delay / 60, "m"
                    time.sleep(self.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug " + issue_url)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored" %
                        (issue_data.issue))
示例#53
0
    def run(self):
        """Fetch Redmine issues page by page and store them.

        Resumes from the last modification date cached in the database
        and requests pages of 25 tickets (Redmine's fixed page size)
        until an empty or repeated page is returned.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        # redmine 1.0 support
        last_page = 1
        tickets_page = 25  # fixed redmine

        bugs = []
        bugsdb = get_database(DBRedmineBackend())

        # still useless in redmine
        bugsdb.insert_supported_traker("redmine", "beta")
        trk = Tracker(Config.url, "redmine", "beta")
        dbtrk = bugsdb.insert_tracker(trk)

        updated_on = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)
        self.url_issues = self._get_issues_url(updated_on)
        url = self.url_issues + "&page=" + str(last_page)
        request = urllib2.Request(url)

        if self.backend_user:
            # Optional HTTP Basic authentication.
            base64string = base64.encodestring(
                '%s:%s' %
                (Config.backend_user, Config.backend_password)).replace(
                    '\n', '')
            request.add_header("Authorization", "Basic %s" % base64string)

        # Get statuses
        self._get_statuses()

        f = urllib2.urlopen(request)
        tickets = json.loads(f.read())

        # Empty first page: nothing new since the cached date.
        if not tickets["issues"]:
            printout("Done. No new bugs to analyze")
            return

        for ticket in tickets["issues"]:
            issue = self.analyze_bug(ticket)
            bugsdb.insert_issue(issue, dbtrk.id)
            time.sleep(self.delay)

        # First id of page 1: Redmine keeps serving the last page for
        # out-of-range page numbers, so seeing this id again means done.
        last_ticket = tickets["issues"][0]['id']

        while True:
            last_page += 1
            url = self.url_issues + "&page=" + str(last_page)
            request = urllib2.Request(url)
            #base64string = base64.encodestring('%s:%s' % (Config.backend_user, Config.backend_password)).replace('\n', '')
            #request.add_header("Authorization", "Basic %s" % base64string)
            f = urllib2.urlopen(request)
            tickets = json.loads(f.read())

            if len(tickets['issues']) == 0:
                break

            pprint.pprint("Tickets read: " + str(tickets["issues"][0]['id']) +
                          " " + str(tickets["issues"][-1]['id']))

            if tickets["issues"][0]['id'] == last_ticket:
                break

            for ticket in tickets["issues"]:
                issue = self.analyze_bug(ticket)
                bugsdb.insert_issue(issue, dbtrk.id)
                time.sleep(self.delay)

        pprint.pprint("Total pages: " + str(last_page))

        # NOTE(review): pages * page-size is an upper bound, not the
        # exact number of analyzed bugs.
        printout("Done. Bugs analyzed:" + str(last_page * tickets_page))
示例#54
0
                if (tr_url != url):
                    aux_trk = Tracker(tr_url, "launchpad", "x.x")
                    dbtrk = bugsdb.insert_tracker(aux_trk)
                bugsdb.insert_issue(issue_data, dbtrk.id)
            except UnicodeEncodeError:
                printerr(
                    "UnicodeEncodeError: the issue %s couldn't be stored" %
                    (issue_data.issue))
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored" %
                         (issue_data.issue))
            except Exception, e:
                printerr("Unexpected Error: the issue %s couldn't be stored" %
                         (issue_data.issue))
                print e

            analyzed.append(bug.web_link)  # for the bizarre error #338
            time.sleep(self.delay)

        try:
            # we read the temporary table with the relationships and create
            # the final one
            bugsdb.store_final_relationships()
        except:
            raise

        printout("Done. %s bugs analyzed" % (nbugs))


# Make this backend selectable via the 'lp' backend name.
Backend.register_backend("lp", LPBackend)
示例#55
0
文件: lp.py 项目: yujuanjiang/Bicho
    def run(self):
        """Fetch Launchpad bugs for the configured project (and its
        sub-projects when it is a meta-project) and store them.

        Logs in through launchpadlib using a cached credential file and
        resumes incrementally from the last cached modification date of
        each sub-project's tracker.
        """
        print ("Running Bicho with delay of %s seconds" % (str(self.delay)))

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        bugsdb = get_database(DBLaunchpadBackend())

        printdbg(url)

        # launchpad needs a temp directory to store cached data
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        cachedir = os.path.join(homedir, ".cache/bicho/")
        if not os.path.exists(cachedir):
            os.makedirs(cachedir)
        cre_file = os.path.join(cachedir + "launchpad-credential")
        self.lp = Launchpad.login_with("Bicho", "production", credentials_file=cre_file)

        # All task statuses: searchTasks would otherwise return only a
        # default subset of open bugs.
        aux_status = [
            "New",
            "Incomplete",
            "Opinion",
            "Invalid",
            "Won't Fix",
            "Expired",
            "Confirmed",
            "Triaged",
            "In Progress",
            "Fix Committed",
            "Fix Released",
            "Incomplete (with response)",
            "Incomplete (without response)",
        ]

        # Check whether the project is a meta project
        lp_project = self.lp.projects[pname]

        if hasattr(lp_project, "projects"):
            projects = [p for p in lp_project.projects]
        else:
            projects = [lp_project]

        printdbg("%s projects to analyze" % len(projects))

        # Still useless - insert meta project
        bugsdb.insert_supported_traker("launchpad", "x.x")
        trk = Tracker(url, "launchpad", "x.x")
        dbtrk = bugsdb.insert_tracker(trk)

        for p in projects:
            # Each sub-project gets its own tracker row, keyed by its
            # bugs.* web link.
            trk_url = p.web_link.replace("://", "://bugs.")
            trk = Tracker(trk_url, "launchpad", "x.x")
            dbtrk = bugsdb.insert_tracker(trk)

            last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

            if last_mod_date:
                bugs = p.searchTasks(
                    status=aux_status, omit_duplicates=False, order_by="date_last_updated", modified_since=last_mod_date
                )
            else:
                bugs = p.searchTasks(status=aux_status, omit_duplicates=False, order_by="date_last_updated")

            printdbg("Last bug already cached: %s" % last_mod_date)

            nbugs = len(bugs)

            if nbugs == 0:
                printout("No bugs found on %s" % p.name)
                continue
            else:
                printout("%s bugs found on %s" % (nbugs, p.name))

            self.analyze_project_bugs(bugs, dbtrk, bugsdb)
示例#56
0
    def run(self):
        """Fetch Redmine issues page by page and store them.

        Resumes from the last modification date cached in the database
        and requests pages of 25 tickets (Redmine's fixed page size)
        until an empty or repeated page is returned.
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        # redmine 1.0 support
        last_page = 1
        tickets_page = 25  # fixed redmine

        bugs = []
        bugsdb = get_database(DBRedmineBackend())

        # still useless in redmine
        bugsdb.insert_supported_traker("redmine", "beta")
        trk = Tracker(Config.url, "redmine", "beta")
        dbtrk = bugsdb.insert_tracker(trk)

        updated_on = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)
        self.url_issues = self._get_issues_url(updated_on)
        url = self.url_issues + "&page=" + str(last_page)
        request = urllib2.Request(url)

        if self.backend_user:
            # Optional HTTP Basic authentication.
            base64string = base64.encodestring(
                '%s:%s' %
                (Config.backend_user, Config.backend_password)).replace(
                    '\n', '')
            request.add_header("Authorization", "Basic %s" % base64string)

        # Get statuses
        self._get_statuses()

        f = urllib2.urlopen(request)
        tickets = json.loads(f.read())

        # BUGFIX: guard against an empty first page; the unguarded
        # tickets["issues"][0] below raised IndexError whenever the
        # database was already up to date.  Mirrors the guarded
        # variant of this method elsewhere in the project.
        if not tickets["issues"]:
            printout("Done. No new bugs to analyze")
            return

        for ticket in tickets["issues"]:
            issue = self.analyze_bug(ticket)
            bugsdb.insert_issue(issue, dbtrk.id)
            time.sleep(self.delay)

        # First id of page 1: Redmine keeps serving the last page for
        # out-of-range page numbers, so seeing this id again means done.
        last_ticket = tickets["issues"][0]['id']

        while True:
            last_page += 1
            url = self.url_issues + "&page=" + str(last_page)
            request = urllib2.Request(url)
            #base64string = base64.encodestring('%s:%s' % (Config.backend_user, Config.backend_password)).replace('\n', '')
            #request.add_header("Authorization", "Basic %s" % base64string)
            f = urllib2.urlopen(request)
            tickets = json.loads(f.read())

            if len(tickets['issues']) == 0:
                break

            pprint.pprint("Tickets read: " + str(tickets["issues"][0]['id']) +
                          " " + str(tickets["issues"][-1]['id']))

            if tickets["issues"][0]['id'] == last_ticket:
                break

            for ticket in tickets["issues"]:
                issue = self.analyze_bug(ticket)
                bugsdb.insert_issue(issue, dbtrk.id)
                time.sleep(self.delay)

        pprint.pprint("Total pages: " + str(last_page))

        # NOTE(review): pages * page-size is an upper bound, not the
        # exact number of analyzed bugs.
        printout("Done. Bugs analyzed:" + str(last_page * tickets_page))