예제 #1
0
    def analyze_bug(self, bug_url):
        """Fetch the ticket at `bug_url`, parse it and return an Issue.

        Returns None when the ticket payload cannot be parsed; re-raises
        any error that occurs while retrieving the URL itself.
        """
        printdbg(bug_url)
        # The ticket number is the last component of the URL path.
        bug_number = bug_url.split('/')[-1]

        try:
            f = urllib.urlopen(bug_url)
            json_ticket = f.read()
            try:
                issue_allura = json.loads(json_ticket)["ticket"]
                issue = self.parse_bug(issue_allura)
                # Attach the ticket's change history to the issue.
                changes = self.analyze_bug_changes(bug_url)
                for c in changes:
                    issue.add_change(c)
                return issue

            except Exception as e:
                # Malformed JSON or unexpected ticket layout: report and skip.
                print("Problems with Ticket format: " + bug_number)
                print(e)
                return None

        except Exception as e:
            # Retrieval/network failure is fatal for this bug.
            printerr("Error in bug analysis: " + bug_url)
            print(e)
            raise
예제 #2
0
 def _urlopen_auth(self, url):
     """
     Opens an URL using an authenticated session.

     Hibernates and retries when the server is unreachable; HTTP errors
     are reported and re-raised.
     """
     keep_trying = True
     while keep_trying:
         if self._is_auth_session():
             opener = urllib2.build_opener()
             for c in self.cookies:
                 q = str(c) + '=' + self.cookies[c]
                 opener.addheaders.append(('Cookie', q))
             # Install the opener so the urlopen() call below actually
             # sends the session cookies (building it alone does nothing).
             urllib2.install_opener(opener)
         keep_trying = False
         try:
             aux = urllib2.urlopen(url)
         except urllib2.HTTPError as e:
             printerr("The server couldn't fulfill the request.")
             printerr("Error code: %s" % e.code)
             # Propagate the error instead of falling through and
             # returning an unbound `aux` (UnboundLocalError before).
             raise
         except urllib2.URLError as e:
             printdbg("Bicho failed to reach the Bugzilla server")
             printdbg("Reason: %s" % e.reason)
             printdbg("Bicho goes into hibernation for %s seconds" %
                      HIBERNATION_LENGTH)
             time.sleep(HIBERNATION_LENGTH)
             keep_trying = True
     return aux
예제 #3
0
 def _store_issue(self, issue, trk_id):
     """Insert `issue` into the bugs DB for tracker `trk_id`.

     A UnicodeEncodeError (non-ASCII data under Python 2, raised while
     inserting or logging) is reported rather than propagated, so one
     bad issue does not stop the whole run.
     """
     try:
         self.bugsdb.insert_issue(issue, trk_id)
         printdbg("Issue #%s stored " % issue.issue)
     except UnicodeEncodeError:
         printerr("UnicodeEncodeError: the issue %s couldn't be stored" %
                  issue.issue)
예제 #4
0
    def __init__(self):
        """Read backend settings from Config and open the Maniphest DB."""
        self.url = Config.url
        self.delay = Config.delay
        self.max_issues = Config.nissues
        self.no_resume = Config.no_resume

        # Resume point: either a start date or an issue id, never both.
        self.start_from = None
        self.from_id = None
        if Config.start_from:
            # Date format was already checked by the config class.
            from dateutil import parser
            self.start_from = parser.parse(Config.start_from)
        elif Config.from_id:
            self.from_id = Config.from_id

        self.db = get_database(DBManiphestBackend())

        self.identities = {}
        self.projects = {}

        try:
            self.backend_token = Config.backend_token
            self.conduit = Conduit(self.url, self.backend_token)
        except AttributeError:
            # A Conduit API token is required to talk to Phabricator.
            printerr("Error: --backend-token is mandatory to download issues from Maniphest\n")
            sys.exit(1)
예제 #5
0
파일: bg.py 프로젝트: MetricsGrimoire/Bicho
 def _store_issue(self, issue, trk_id):
     """Insert `issue` into the bugs DB for tracker `trk_id`.

     A UnicodeEncodeError (non-ASCII data under Python 2, raised while
     inserting or logging) is reported rather than propagated, so one
     bad issue does not stop the whole run.
     """
     try:
         self.bugsdb.insert_issue(issue, trk_id)
         printdbg("Issue #%s stored " % issue.issue)
     except UnicodeEncodeError:
         printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                  % issue.issue)
예제 #6
0
    def _set_version(self):
        """Detect and cache the remote Bugzilla version.

        Does nothing when the version is already known; otherwise fetches
        the server metadata page and parses the version out of its XML.
        Errors while retrieving or parsing are reported and re-raised.
        """
        if self.version:
            printdbg("Bugzilla version: %s" % self.version)
            return

        info_url = self._get_info_url(self.url)

        f = self._urlopen_auth(info_url)
        try:
            printdbg("Getting bugzilla version from %s" % info_url)
            contents = f.read()
        except Exception:
            printerr("Error retrieving URL %s" % info_url)
            raise
        finally:
            # Close the connection even when the read fails (the original
            # leaked it on the error path).
            f.close()

        handler = BugzillaHandler()
        parser = xml.sax.make_parser()
        parser.setContentHandler(handler)
        try:
            # Strip characters that are illegal in XML before feeding the
            # parser, so a dirty payload does not abort the parse.
            cleaned_contents = ''.join(
                c for c in contents if valid_XML_char_ordinal(ord(c)))
            parser.feed(cleaned_contents)
        except Exception:
            printerr("Error parsing URL %s" % info_url)
            raise
        parser.close()

        self.version = handler.get_version()
        printdbg("Bugzilla version: %s" % self.version)
예제 #7
0
    def __init__(self):
        """Read backend settings from Config and open the Maniphest DB."""
        self.url = Config.url
        self.delay = Config.delay
        self.max_issues = Config.nissues
        self.no_resume = Config.no_resume

        # Resume point: either a start date or an issue id, never both.
        self.start_from = None
        self.from_id = None
        if Config.start_from:
            # Date format was already checked by the config class.
            from dateutil import parser
            self.start_from = parser.parse(Config.start_from)
        elif Config.from_id:
            self.from_id = Config.from_id

        self.db = get_database(DBManiphestBackend())

        self.identities = {}
        self.projects = {}

        try:
            self.backend_token = Config.backend_token
            self.conduit = Conduit(self.url, self.backend_token)
        except AttributeError:
            # A Conduit API token is required to talk to Phabricator.
            printerr(
                "Error: --backend-token is mandatory to download issues from Maniphest\n"
            )
            sys.exit(1)
예제 #8
0
    def analyze_bug(self, bug_url):
        """Fetch the ticket at `bug_url`, parse it and return an Issue.

        Returns None when the ticket payload cannot be parsed; re-raises
        any error that occurs while retrieving the URL itself.
        """
        printdbg(bug_url)
        # The ticket number is the last component of the URL path.
        bug_number = bug_url.split('/')[-1]

        try:
            f = urllib.urlopen(bug_url)
            json_ticket = f.read()
            try:
                issue_allura = json.loads(json_ticket)["ticket"]
                issue = self.parse_bug(issue_allura)
                # Attach the ticket's change history to the issue.
                changes = self.analyze_bug_changes(bug_url)
                for c in changes:
                    issue.add_change(c)
                return issue

            except Exception as e:
                # Malformed JSON or unexpected ticket layout: report and skip.
                print("Problems with Ticket format: " + bug_number)
                print(e)
                return None

        except Exception as e:
            # Retrieval/network failure is fatal for this bug.
            printerr("Error in bug analysis: " + bug_url)
            print(e)
            raise
예제 #9
0
    def _safe_xml_parse(self, bugs_url, handler):
        """Download `bugs_url` and feed its contents to the SAX `handler`.

        Falls back to a second parse with illegal XML characters stripped
        when the raw contents are not well-formed; re-raises when even the
        cleaned contents cannot be parsed.
        """
        f = self._urlopen_auth(bugs_url)
        try:
            try:
                contents = f.read()
            except Exception:
                printerr("Error retrieving URL: %s" % (bugs_url))
                raise

            parser = xml.sax.make_parser()
            parser.setContentHandler(handler)
            try:
                parser.feed(contents)
                parser.close()
            except Exception:
                # Retry, cleaning only the invalid XML characters.
                try:
                    parser2 = xml.sax.make_parser()
                    parser2.setContentHandler(handler)
                    printdbg("Cleaning dirty XML")
                    cleaned_contents = ''.join(
                        c for c in contents if valid_XML_char_ordinal(ord(c)))
                    parser2.feed(cleaned_contents)
                    parser2.close()
                except Exception:
                    printerr("Error parsing URL: %s" % (bugs_url))
                    raise
        finally:
            # Always release the connection, even on a failed read/parse
            # (the original leaked it, and set the content handler twice).
            f.close()
예제 #10
0
파일: bg.py 프로젝트: MetricsGrimoire/Bicho
    def _safe_xml_parse(self, bugs_url, handler):
        """Download `bugs_url` and feed its contents to the SAX `handler`.

        Falls back to a second parse with illegal XML characters stripped
        when the raw contents are not well-formed; re-raises when even the
        cleaned contents cannot be parsed.
        """
        f = self._urlopen_auth(bugs_url)
        try:
            try:
                contents = f.read()
            except Exception:
                printerr("Error retrieving URL: %s" % (bugs_url))
                raise

            parser = xml.sax.make_parser()
            parser.setContentHandler(handler)
            try:
                parser.feed(contents)
                parser.close()
            except Exception:
                # Retry, cleaning only the invalid XML characters.
                try:
                    parser2 = xml.sax.make_parser()
                    parser2.setContentHandler(handler)
                    printdbg("Cleaning dirty XML")
                    cleaned_contents = ''.join(
                        c for c in contents if valid_XML_char_ordinal(ord(c)))
                    parser2.feed(cleaned_contents)
                    parser2.close()
                except Exception:
                    printerr("Error parsing URL: %s" % (bugs_url))
                    raise
        finally:
            # Always release the connection, even on a failed read/parse
            # (the original leaked it, and set the content handler twice).
            f.close()
예제 #11
0
파일: bg.py 프로젝트: MetricsGrimoire/Bicho
 def _urlopen_auth(self, url):
     """
     Opens an URL using an authenticated session.

     Hibernates and retries when the server is unreachable; HTTP errors
     are reported and re-raised.
     """
     keep_trying = True
     while keep_trying:
         if self._is_auth_session():
             opener = urllib2.build_opener()
             for c in self.cookies:
                 q = str(c) + '=' + self.cookies[c]
                 opener.addheaders.append(('Cookie', q))
             # Install the opener so the urlopen() call below actually
             # sends the session cookies (building it alone does nothing).
             urllib2.install_opener(opener)
         keep_trying = False
         try:
             aux = urllib2.urlopen(url)
         except urllib2.HTTPError as e:
             printerr("The server couldn't fulfill the request.")
             printerr("Error code: %s" % e.code)
             # Propagate the error instead of falling through and
             # returning an unbound `aux` (UnboundLocalError before).
             raise
         except urllib2.URLError as e:
             printdbg("Bicho failed to reach the Bugzilla server")
             printdbg("Reason: %s" % e.reason)
             printdbg("Bicho goes into hibernation for %s seconds"
                      % HIBERNATION_LENGTH)
             time.sleep(HIBERNATION_LENGTH)
             keep_trying = True
     return aux
예제 #12
0
파일: bg.py 프로젝트: MetricsGrimoire/Bicho
    def _set_version(self):
        """Detect and cache the remote Bugzilla version.

        Does nothing when the version is already known; otherwise fetches
        the server metadata page and parses the version out of its XML.
        Errors while retrieving or parsing are reported and re-raised.
        """
        if self.version:
            printdbg("Bugzilla version: %s" % self.version)
            return

        info_url = self._get_info_url(self.url)

        f = self._urlopen_auth(info_url)
        try:
            printdbg("Getting bugzilla version from %s" % info_url)
            contents = f.read()
        except Exception:
            printerr("Error retrieving URL %s" % info_url)
            raise
        finally:
            # Close the connection even when the read fails (the original
            # leaked it on the error path).
            f.close()

        handler = BugzillaHandler()
        parser = xml.sax.make_parser()
        parser.setContentHandler(handler)
        try:
            # Strip characters that are illegal in XML before feeding the
            # parser, so a dirty payload does not abort the parse.
            cleaned_contents = ''.join(
                c for c in contents if valid_XML_char_ordinal(ord(c)))
            parser.feed(cleaned_contents)
        except Exception:
            printerr("Error parsing URL %s" % info_url)
            raise
        parser.close()

        self.version = handler.get_version()
        printdbg("Bugzilla version: %s" % self.version)
예제 #13
0
파일: jira.py 프로젝트: adamlofting/Bicho
    def parse_changes(self):
        """Parse the issue's HTML activity log into a list of Change objects."""
        soup = BeautifulSoup(self.html)
        self.remove_comments(soup)
        remove_tags = ['i']
        try:
            # Unwrap <i> tags so their text joins the surrounding content.
            [i.replaceWith(i.contents[0]) for i in soup.findAll(remove_tags)]
        except Exception:
            # Best effort: leave the markup as-is if unwrapping fails
            # (was a bare `None` expression statement, i.e. a no-op).
            pass

        changes = []
        #FIXME The id of the changes are not stored
        tables = soup.findAll("div", {"class": "actionContainer"})

        for table in tables:
            author_date_text = table.find("div", {"class": "action-details"})

            if author_date_text is None:
                # no changes have been performed on the issue
                continue
            elif len(author_date_text) < 3:
                self.changes_lost += 1
                printerr("Change author format not supported. Change lost!")
                continue

            a_link = table.find("a", {"class": "user-hover user-avatar"})

            if a_link:
                # at this point a_link will be similar to the lines below:
                #<a class="user-hover user-avatar" rel="kiyoshi.lee"
                author_url = a_link['rel']
                author = People(author_url)
            else:
                # instead of <a .. we got a <span ..
                span_link = table.find("span", {"class": "user-hover user-avatar"})
                author_url = span_link['rel']
                author = People(author_url)

            # we look for a string similar to:
            #<time datetime="2011-11-19T00:27-0800">19/Nov/11 12:27 AM</time>
            raw_date = author_date_text.find('time')['datetime']
            date = parse(raw_date).replace(tzinfo=None)

            rows = list(table.findAll('tr'))
            for row in rows:
                cols = list(row.findAll('td'))
                # Change rows have exactly three cells: field, old, new.
                if len(cols) == 3:
                    field = unicode(cols[0].contents[0].strip())
                    if field == "Assignee":
                        # Assignee cells embed the identifier in markup.
                        old = unicode(self._get_identifier(cols[1]))
                        new = unicode(self._get_identifier(cols[2]))
                    else:
                        old = unicode(cols[1].contents[0].strip())
                        new = unicode(cols[2].contents[0].strip())

                    change = Change(field, old, new, author, date)
                    changes.append(change)
        return changes
예제 #14
0
    def parse_changes(self):
        """Parse the issue's HTML activity log into a list of Change objects."""
        soup = BeautifulSoup(self.html)
        self.remove_comments(soup)
        remove_tags = ['i']
        try:
            # Unwrap <i> tags so their text joins the surrounding content.
            [i.replaceWith(i.contents[0]) for i in soup.findAll(remove_tags)]
        except Exception:
            # Best effort: leave the markup as-is if unwrapping fails
            # (was a bare `None` expression statement, i.e. a no-op).
            pass

        changes = []
        #FIXME The id of the changes are not stored
        tables = soup.findAll("div", {"class": "actionContainer"})

        for table in tables:
            author_date_text = table.find("div", {"class": "action-details"})

            if author_date_text is None:
                # no changes have been performed on the issue
                continue
            elif len(author_date_text) < 3:
                self.changes_lost += 1
                printerr("Change author format not supported. Change lost!")
                continue

            auth_link = table.find("a", {"class": "user-hover user-avatar"})

            if not auth_link:
                # Automated changes usually have this class
                auth_link = table.find("a", {"class": "user-hover"})

                # instead of <a .. we got a <span>
                if not auth_link:
                    auth_link = table.find("span",
                                           {"class": "user-hover user-avatar"})

            # Fall back to 'anonymous' when the element carries no rel attr.
            author_url = auth_link.get('rel', 'anonymous')
            author = People(author_url)

            # we look for a string similar to:
            #<time datetime="2011-11-19T00:27-0800">19/Nov/11 12:27 AM</time>
            raw_date = author_date_text.find('time')['datetime']
            date = parse(raw_date).replace(tzinfo=None)

            rows = list(table.findAll('tr'))
            for row in rows:
                cols = list(row.findAll('td'))
                # Change rows have exactly three cells: field, old, new.
                if len(cols) == 3:
                    field = unicode(cols[0].contents[0].strip())
                    if field == "Assignee":
                        # Assignee cells embed the identifier in markup.
                        old = unicode(self._get_identifier(cols[1]))
                        new = unicode(self._get_identifier(cols[2]))
                    else:
                        old = unicode(cols[1].contents[0].strip())
                        new = unicode(cols[2].contents[0].strip())

                    change = Change(field, old, new, author, date)
                    changes.append(change)
        return changes
예제 #15
0
    def run(self):
        """Fetch and store all review requests, exiting non-zero on API errors."""
        printout("Running Bicho - url: %s" % self.url)

        try:
            self.fetch_and_store()
        except (requests.exceptions.HTTPError, ReviewBoardAPIError) as e:
            # `except E, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
            printerr("Error: %s" % e)
            sys.exit(1)
예제 #16
0
    def run(self):
        """Fetch and store all tickets, exiting non-zero on RPC/HTTP errors."""
        printout("Running Bicho with delay of %s seconds - %s" % (self.delay, self.url))

        try:
            self.fetch_and_store_tickets()
        except (requests.exceptions.HTTPError, TracRPCError) as e:
            # `except E, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
            printerr("Error: %s" % e)
            sys.exit(1)
예제 #17
0
파일: trac.py 프로젝트: rodrigoprimo/Bicho
    def run(self):
        """Fetch and store all tickets, exiting non-zero on RPC/HTTP errors."""
        printout("Running Bicho - %s" % self.url)

        try:
            self.fetch_and_store_tickets()
        except (requests.exceptions.HTTPError, TracRPCError) as e:
            # `except E, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
            printerr("Error: %s" % e)
            sys.exit(1)
예제 #18
0
    def run(self):
        """Fetch and store all review requests, exiting non-zero on API errors."""
        printout("Running Bicho - url: %s" % self.url)

        try:
            self.fetch_and_store()
        except (requests.exceptions.HTTPError, ReviewBoardAPIError) as e:
            # `except E, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
            printerr("Error: %s" % e)
            sys.exit(1)
예제 #19
0
    def run(self):
        """
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        issues_per_query = 250
        start_issue = 1

        bugs = []
        bugsdb = get_database(DBGoogleCodeBackend())

        # still useless
        bugsdb.insert_supported_traker("googlecode", "beta")
        trk = Tracker(Config.url, "googlecode", "beta")

        dbtrk = bugsdb.insert_tracker(trk)

        self.url = Config.url

       #  https://code.google.com/feeds/issues/p/mobile-time-care
        self.url_issues = Config.url + "/issues/full?max-results=1"
        printdbg("URL for getting metadata " + self.url_issues)

        d = feedparser.parse(self.url_issues)

        total_issues = int(d['feed']['opensearch_totalresults'])
        print "Total bugs: ", total_issues
        if total_issues == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)
        remaining = total_issues

        print "ETA ", (total_issues * Config.delay) / (60), "m (", (total_issues * Config.delay) / (60 * 60), "h)"

        while start_issue < total_issues:
            self.url_issues = Config.url + "/issues/full?max-results=" + str(issues_per_query)
            self.url_issues += "&start-index=" + str(start_issue)

            printdbg("URL for next issues " + self.url_issues)

            d = feedparser.parse(self.url_issues)

            for entry in d['entries']:
                try:
                    issue = self.analyze_bug(entry)
                    if issue is None:
                        continue
                    bugsdb.insert_issue(issue, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (remaining) * Config.delay / 60, "m", " issues ", str(remaining)
                    time.sleep(Config.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug ")
                    pprint.pprint(entry)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                             % (issue.issue))
예제 #20
0
    def parse_changes(self):
        """Parse the Bugzilla activity table into a list of Change objects.

        Looks for the first <table> whose header row has 5 columns
        (who, when, what, removed, added) and converts each data row
        into a Change.
        """
        soup = BeautifulSoup(self.html)
        self.remove_comments(soup)
        # Tags to flatten into plain text inside the matched table.
        remove_tags = ['a', 'span', 'i']
        changes = []
        tables = soup.findAll('table')

        # We look for the first table with 5 cols
        # NOTE(review): if no table has 5 header cells, `table` is left
        # bound to the *last* table (not None) and is parsed anyway —
        # verify this fall-through is intended.
        table = None
        for table in tables:
            if len(table.tr.findAll('th', recursive=False)) == 5:
                try:
                    for i in table.findAll(remove_tags):
                        i.replaceWith(i.text)
                except:
                    printerr("error removing HTML tags")
                break

        if table is None:
            return changes

        # Skip the header row; each remaining row is one change entry.
        rows = list(table.findAll('tr'))
        for row in rows[1:]:
            cols = list(row.findAll('td'))
            if len(cols) == 5:
                # Full row: author, date, field, removed, added.
                person_email = cols[0].contents[0].strip()
                person_email = unicode(person_email.replace('&#64;', '@'))
                date = self._to_datetime_with_secs(cols[1].contents[0].strip())
                # when the field contains an Attachment, the list has more
                #than a field. For example:
                #
                # [u'\n', u'Attachment #12723', u'\n              Flag\n            ']
                #
                if len(cols[2].contents) > 1:
                    aux_c = unicode(" ".join(cols[2].contents))
                    field = unicode(aux_c.replace("\n", "").strip())
                else:
                    field = unicode(cols[2].contents[0].replace("\n",
                                                                "").strip())
                removed = unicode(cols[3].contents[0].strip())
                added = unicode(cols[4].contents[0].strip())
            else:
                # same as above with the Attachment example
                # NOTE(review): short rows reuse person_email/date from the
                # previous 5-column row (rowspan continuation); a short
                # first row would raise NameError — confirm against data.
                if len(cols[0].contents) > 1:
                    aux_c = unicode(" ".join(cols[0].contents))
                    field = aux_c.replace("\n", "").strip()
                else:
                    field = cols[0].contents[0].strip()
                removed = cols[1].contents[0].strip()
                added = cols[2].contents[0].strip()

            field, removed, added = self.sanityze_change(field, removed, added)
            by = People(person_email)
            by.set_email(person_email)
            change = Change(field, removed, added, by, date)
            changes.append(change)

        return changes
예제 #21
0
    def run(self):
        """Fetch and store all tickets, exiting non-zero on RPC/HTTP errors."""
        printout("Running Bicho with delay of %s seconds - %s" %
                 (self.delay, self.url))

        try:
            self.fetch_and_store_tickets()
        except (requests.exceptions.HTTPError, TracRPCError) as e:
            # `except E, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
            printerr("Error: %s" % e)
            sys.exit(1)
예제 #22
0
파일: sf.py 프로젝트: MetricsGrimoire/Bicho
 def __parse_issue_summary(self, soup):
     """Return the issue summary extracted from the page title."""
     try:
         m = ISSUE_SUMMARY_PATTERN.match(unicode(soup.title.string))
         return m.group(1)
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue summary')
         raise SourceForgeParserError('Error parsing issue summary')
예제 #23
0
파일: bg.py 프로젝트: MetricsGrimoire/Bicho
    def parse_changes(self):
        """Parse the Bugzilla activity table into a list of Change objects.

        Looks for the first <table> whose header row has 5 columns
        (who, when, what, removed, added) and converts each data row
        into a Change.
        """
        soup = BeautifulSoup(self.html)
        self.remove_comments(soup)
        # Tags to flatten into plain text inside the matched table.
        remove_tags = ['a', 'span', 'i']
        changes = []
        tables = soup.findAll('table')

        # We look for the first table with 5 cols
        # NOTE(review): if no table has 5 header cells, `table` is left
        # bound to the *last* table (not None) and is parsed anyway —
        # verify this fall-through is intended.
        table = None
        for table in tables:
            if len(table.tr.findAll('th', recursive=False)) == 5:
                try:
                    for i in table.findAll(remove_tags):
                        i.replaceWith(i.text)
                except:
                    printerr("error removing HTML tags")
                break

        if table is None:
            return changes

        # Skip the header row; each remaining row is one change entry.
        rows = list(table.findAll('tr'))
        for row in rows[1:]:
            cols = list(row.findAll('td'))
            if len(cols) == 5:
                # Full row: author, date, field, removed, added.
                person_email = cols[0].contents[0].strip()
                person_email = unicode(person_email.replace('&#64;', '@'))
                date = self._to_datetime_with_secs(cols[1].contents[0].strip())
                # when the field contains an Attachment, the list has more
                #than a field. For example:
                #
                # [u'\n', u'Attachment #12723', u'\n              Flag\n            ']
                #
                if len(cols[2].contents) > 1:
                    aux_c = unicode(" ".join(cols[2].contents))
                    field = unicode(aux_c.replace("\n", "").strip())
                else:
                    field = unicode(cols[2].contents[0].replace("\n", "").strip())
                removed = unicode(cols[3].contents[0].strip())
                added = unicode(cols[4].contents[0].strip())
            else:
                # same as above with the Attachment example
                # NOTE(review): short rows reuse person_email/date from the
                # previous 5-column row (rowspan continuation); a short
                # first row would raise NameError — confirm against data.
                if len(cols[0].contents) > 1:
                    aux_c = unicode(" ".join(cols[0].contents))
                    field = aux_c.replace("\n", "").strip()
                else:
                    field = cols[0].contents[0].strip()
                removed = cols[1].contents[0].strip()
                added = cols[2].contents[0].strip()

            field, removed, added = self.sanityze_change(field, removed, added)
            by = People(person_email)
            by.set_email(person_email)
            change = Change(field, removed, added, by, date)
            changes.append(change)

        return changes
예제 #24
0
파일: sf.py 프로젝트: sferdi/lucenebug
 def __parse_issue_summary(self, soup):
     """Return the issue summary extracted from the page title."""
     try:
         m = ISSUE_SUMMARY_PATTERN.match(unicode(soup.title.string))
         return m.group(1)
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue summary')
         raise SourceForgeParserError('Error parsing issue summary')
예제 #25
0
파일: sf.py 프로젝트: MetricsGrimoire/Bicho
 def __parse_issue_assigned_to(self, soup):
     """Return the assignee value that follows the 'Assigned to' label."""
     try:
         assigned = soup.find({'label': True},
                              text=ISSUE_ASSIGNED_TO_PATTERN).findNext('p')
         return assigned.contents[0]
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue assigned to')
         raise SourceForgeParserError('Error parsing issue assigned to')
예제 #26
0
파일: sf.py 프로젝트: MetricsGrimoire/Bicho
 def __parse_issue_resolution(self, soup):
     """Return the resolution value that follows the 'Resolution' label."""
     try:
         resolution = soup.find({'label': True},
                                text=ISSUE_RESOLUTION_PATTERN).findNext('p')
         return resolution.contents[0]
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue resolution')
         raise SourceForgeParserError('Error parsing issue resolution')
예제 #27
0
파일: sf.py 프로젝트: MetricsGrimoire/Bicho
 def __parse_issue_group(self, soup):
     """Return the group value that follows the 'Group' label."""
     try:
         group = soup.find({'label': True},
                           text=ISSUE_GROUP_PATTERN).findNext('p')
         return group.contents[0]
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue group')
         raise SourceForgeParserError('Error parsing issue group')
예제 #28
0
파일: sf.py 프로젝트: MetricsGrimoire/Bicho
 def __parse_issue_status(self, soup):
     """Return the status value that follows the 'Status' label."""
     try:
         status = soup.find({'label': True},
                            text=ISSUE_STATUS_PATTERN).findNext('p')
         return status.contents[0]
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue status')
         raise SourceForgeParserError('Error parsing issue status')
예제 #29
0
파일: sf.py 프로젝트: MetricsGrimoire/Bicho
 def __parse_issue_visibility(self, soup):
     """Return the visibility value that follows the 'Visibility' label."""
     try:
         visibility = soup.find({'label': True},
                                text=ISSUE_VISIBILITY_PATTERN).findNext('p')
         return visibility.contents[0]
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue visibility')
         raise SourceForgeParserError('Error parsing issue visibility')
예제 #30
0
파일: sf.py 프로젝트: MetricsGrimoire/Bicho
 def __parse_issue_priority(self, soup):
     """Return the priority value that follows the 'Priority' label."""
     try:
         priority = soup.find({'label': True},
                              text=ISSUE_PRIORITY_PATTERN).findNext('p')
         return priority.contents[0]
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue priority')
         raise SourceForgeParserError('Error parsing issue priority')
예제 #31
0
파일: sf.py 프로젝트: MetricsGrimoire/Bicho
 def __parse_issue_category(self, soup):
     """Return the category value that follows the 'Category' label."""
     try:
         category = soup.find({'label': True},
                              text=ISSUE_CATEGORY_PATTERN).findNext('p')
         return category.contents[0]
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue category')
         raise SourceForgeParserError('Error parsing issue category')
예제 #32
0
파일: sf.py 프로젝트: sferdi/lucenebug
 def __parse_issue_status(self, soup):
     """Return the status value that follows the 'Status' label."""
     try:
         status = soup.find({'label': True},
                            text=ISSUE_STATUS_PATTERN).findNext('p')
         return status.contents[0]
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue status')
         raise SourceForgeParserError('Error parsing issue status')
예제 #33
0
    def run(self):
        """Verify Conduit credentials, then fetch and store all tasks."""
        printout("Running Bicho - %s" % self.url)

        if not self.check_auth():
            sys.exit(1)

        try:
            self.fetch_and_store_tasks()
        except (requests.exceptions.HTTPError, ConduitError) as e:
            # `except E, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
            printerr("Error: %s" % e)
            sys.exit(1)
예제 #34
0
    def check_auth(self):
        """Return True when the Conduit credentials are valid, else False."""
        try:
            printdbg("Checking conduit credentials")
            self.conduit.whoami()
            printdbg("Credentials checked")

            return True
        except (requests.exceptions.HTTPError, ConduitError) as e:
            # `except E, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
            printerr("Error: %s" % e)
            return False
예제 #35
0
파일: sf.py 프로젝트: sferdi/lucenebug
 def __parse_issue_group(self, soup):
     """Return the group value that follows the 'Group' label."""
     try:
         group = soup.find({'label': True},
                           text=ISSUE_GROUP_PATTERN).findNext('p')
         return group.contents[0]
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue group')
         raise SourceForgeParserError('Error parsing issue group')
예제 #36
0
파일: sf.py 프로젝트: sferdi/lucenebug
 def __parse_issue_category(self, soup):
     """Return the category value that follows the 'Category' label."""
     try:
         category = soup.find({'label': True},
                              text=ISSUE_CATEGORY_PATTERN).findNext('p')
         return category.contents[0]
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue category')
         raise SourceForgeParserError('Error parsing issue category')
예제 #37
0
    def run(self):
        """Verify Conduit credentials, then fetch and store all tasks."""
        printout("Running Bicho - %s" % self.url)

        if not self.check_auth():
            sys.exit(1)

        try:
            self.fetch_and_store_tasks()
        except (requests.exceptions.HTTPError, ConduitError) as e:
            # `except E, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
            printerr("Error: %s" % e)
            sys.exit(1)
예제 #38
0
파일: sf.py 프로젝트: sferdi/lucenebug
 def __parse_issue_priority(self, soup):
     """Return the priority value that follows the 'Priority' label."""
     try:
         priority = soup.find({'label': True},
                              text=ISSUE_PRIORITY_PATTERN).findNext('p')
         return priority.contents[0]
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit
         # are not swallowed; any parse failure becomes a parser error.
         printerr('Error parsing issue priority')
         raise SourceForgeParserError('Error parsing issue priority')
예제 #39
0
파일: github.py 프로젝트: davidziman/Bicho
 def __init__(self):
     """Read GitHub backend settings; user and password are mandatory."""
     self.url = Config.url
     self.delay = Config.delay
     try:
         self.backend_password = Config.backend_password
         self.backend_user = Config.backend_user
     except AttributeError:
         # The original backslash line continuation embedded the source
         # indentation into the message; emit a clean single-line string.
         printerr("\n--backend-user and --backend-password are mandatory "
                  "to download bugs from Github\n")
         sys.exit(1)
     self.remaining_ratelimit = 0
예제 #40
0
파일: github.py 프로젝트: davidziman/Bicho
 def __init__(self):
     """Read GitHub backend settings; user and password are mandatory."""
     self.url = Config.url
     self.delay = Config.delay
     try:
         self.backend_password = Config.backend_password
         self.backend_user = Config.backend_user
     except AttributeError:
         # The original backslash line continuation embedded the source
         # indentation into the message; emit a clean single-line string.
         printerr("\n--backend-user and --backend-password are mandatory "
                  "to download bugs from Github\n")
         sys.exit(1)
     self.remaining_ratelimit = 0
예제 #41
0
    def check_auth(self):
        """Return True when the Conduit credentials are valid, else False."""
        try:
            printdbg("Checking conduit credentials")
            self.conduit.whoami()
            printdbg("Credentials checked")

            return True
        except (requests.exceptions.HTTPError, ConduitError) as e:
            # `except E, e` is Python-2-only syntax; `as` works on 2.6+ and 3.
            printerr("Error: %s" % e)
            return False
예제 #42
0
파일: trac.py 프로젝트: iganchev/Bicho
    def run(self):
        """Fetch Trac tickets in the configured id range and store them.

        Iterates the tracker's ticket ids from self.start_from to
        self.end_with (defaults: the full range), inserting each parsed
        issue into the database.
        """
        cfg = Config()
        cfg.load_from_file("/home/user/Grimoire/Bicho/bicho/bicho.conf")

        # NOTE(review): the tracker URL is hard-coded for this backend;
        # it should come from the configuration — TODO confirm.
        url = 'http://trac.nginx.org/nginx/'
        tibi = TracBackend()
        issues = tibi.getIDs(url)
        bugsdb = get_database(DBTracBackend())
        bugsdb.insert_supported_traker("trac", "1.0.6post2")

        trk = Tracker(url, "trac", "1.0.6post2")
        dbtrk = bugsdb.insert_tracker(trk)

        # Default to the whole issue range when no bounds were given.
        self.start_from = 0 if self.start_from is None else self.start_from
        self.end_with = len(issues) if self.end_with is None else self.end_with

        for i in range(len(issues)):
            if i < self.start_from:
                continue
            elif i > self.end_with:
                break

            printdbg("We are trying issue: {}".format(issues[i]))
            try:
                printdbg("Getting the entry")
                raw_data = tibi.getIssue(url, issues[i])
                printdbg("Parsing the entry")
                issue = tibi.analyzeBug(raw_data)
                printdbg("Inserting the issue into the DB")
                bugsdb.insert_issue(issue, dbtrk.id)

            except UnicodeEncodeError as e:
                printerr(
                    "UnicodeEncodeError: the issue %s couldn't be stored"
                    % (issues[i]))
                print(e)

            except Exception:
                printerr("Error :")
                import traceback
                traceback.print_exc()
                # Exit non-zero: this is an error path (was exit(0),
                # which reported success on failure).
                sys.exit(1)
예제 #43
0
파일: sf.py 프로젝트: sferdi/lucenebug
 def __parse_issue_resolution(self, soup):
     """Extract the 'resolution' field from a SourceForge issue page.

     :param soup: BeautifulSoup tree of the issue HTML
     :return: first child of the <p> tag following the resolution label
     :raise SourceForgeParserError: when the expected markup is missing
     """
     try:
         resolution = soup.find({'label': True},
                                text=ISSUE_RESOLUTION_PATTERN).findNext('p')
         return resolution.contents[0]
     except Exception:
         # BUG FIX: a bare 'except:' also trapped SystemExit and
         # KeyboardInterrupt, making the crawler uninterruptible here;
         # Exception preserves the intended catch-all parsing guard.
         printerr('Error parsing issue resolution')
         raise SourceForgeParserError('Error parsing issue resolution')
예제 #44
0
파일: sf.py 프로젝트: sferdi/lucenebug
 def __parse_issue_assigned_to(self, soup):
     """Extract the 'assigned to' field from a SourceForge issue page.

     :param soup: BeautifulSoup tree of the issue HTML
     :return: first child of the <p> tag following the assigned-to label
     :raise SourceForgeParserError: when the expected markup is missing
     """
     try:
         assigned = soup.find({'label': True},
                              text=ISSUE_ASSIGNED_TO_PATTERN).findNext('p')
         return assigned.contents[0]
     except Exception:
         # BUG FIX: narrowed from a bare 'except:' which also swallowed
         # SystemExit/KeyboardInterrupt.
         printerr('Error parsing issue assigned to')
         raise SourceForgeParserError('Error parsing issue assigned to')
예제 #45
0
파일: sf.py 프로젝트: sferdi/lucenebug
 def __parse_issue_visibility(self, soup):
     """Extract the 'visibility' field from a SourceForge issue page.

     :param soup: BeautifulSoup tree of the issue HTML
     :return: first child of the <p> tag following the visibility label
     :raise SourceForgeParserError: when the expected markup is missing
     """
     try:
         visibility = soup.find({'label': True},
                                text=ISSUE_VISIBILITY_PATTERN).findNext('p')
         return visibility.contents[0]
     except Exception:
         # BUG FIX: narrowed from a bare 'except:' which also swallowed
         # SystemExit/KeyboardInterrupt.
         printerr('Error parsing issue visibility')
         raise SourceForgeParserError('Error parsing issue visibility')
예제 #46
0
파일: lp.py 프로젝트: yujuanjiang/Bicho
 def _get_person(self, lpperson):
     """Return a Bicho People object built from a Launchpad person.

     Falls back to People("unknown") when the Launchpad object cannot
     be read.
     """
     try:
         p = People(lpperson.name)
         p.set_name(lpperson.display_name)
         if lpperson.confirmed_email_addresses:
             # Only the first confirmed address is kept.
             for m in lpperson.confirmed_email_addresses:
                 p.set_email(m.email)
                 break
     except Exception as e:
         printerr(str(e))
         p = People("unknown")
     # BUG FIX: the docstring promises a People object but the original
     # fell off the end and implicitly returned None.
     return p
예제 #47
0
파일: lp.py 프로젝트: yujuanjiang/Bicho
    def analyze_project_bugs(self, bugs, dbtrk, bugsdb):
        """Analyze every bug of a project and store the issues in the DB.

        :param bugs: iterable of Launchpad bug tasks
        :param dbtrk: tracker database row the issues belong to
        :param bugsdb: database backend used for insertion
        """
        analyzed = []
        nbugs = 0

        for bug in bugs:
            if bug.web_link in analyzed:
                continue  # for the bizarre error #338

            try:
                issue_data = self.analyze_bug(bug)
            except Exception as e:
                # BUG FIX: the original message embedded a stray quote and
                # the source indentation via a backslash continuation.
                printerr(
                    "Error in function analyzeBug with URL: %s and Bug: %s"
                    % (str(dbtrk.url), bug)
                )
                raise e

            try:
                bugsdb.insert_issue(issue_data, dbtrk.id)
                nbugs += 1
            except UnicodeEncodeError:
                printerr("UnicodeEncodeError: the issue %s couldn't be stored" % (issue_data.issue))
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored" % (issue_data.issue))
            except Exception as e:
                printerr("Unexpected Error: the issue %s couldn't be stored" % (issue_data.issue))
                print(e)

            # BUG FIX: record the processed link so the dedup check at the
            # top of the loop actually works (matches the sibling variant).
            analyzed.append(bug.web_link)  # for the bizarre error #338
예제 #48
0
파일: sf.py 프로젝트: MetricsGrimoire/Bicho
 def __parse_issue_description(self, soup):
     """Extract the description text from a SourceForge issue page.

     :param soup: BeautifulSoup tree of the issue HTML
     :return: unicode string with the issue description
     :raise SourceForgeParserError: when the expected markup is missing
     """
     try:
         # Details is a list of unicode strings, so the
         # strings are joined into a string to build the
         # description field.
         details = soup.find({'label': True},
                             text=ISSUE_DETAILS_PATTERN).findNext('p')
         desc = u''.join(details.contents)
         return desc
     except Exception:
         # BUG FIX: narrowed from a bare 'except:' which also swallowed
         # SystemExit/KeyboardInterrupt.
         printerr('Error parsing issue description')
         raise SourceForgeParserError('Error parsing issue description')
예제 #49
0
 def _get_person(self, lpperson):
     """Return a Bicho People object built from a Launchpad person.

     Falls back to People("unknown") when the Launchpad object cannot
     be read.
     """
     try:
         p = People(lpperson.name)
         p.set_name(lpperson.display_name)
         if lpperson.confirmed_email_addresses:
             # Only the first confirmed address is kept.
             for m in lpperson.confirmed_email_addresses:
                 p.set_email(m.email)
                 break
     except Exception as e:
         printerr(str(e))
         p = People("unknown")
     # BUG FIX: the docstring promises a People object but the original
     # fell off the end and implicitly returned None.
     return p
예제 #50
0
    def analyze_project_bugs(self, bugs, dbtrk, bugsdb):
        """Analyze every bug of a project and store the issues in the DB.

        :param bugs: iterable of Launchpad bug tasks
        :param dbtrk: tracker database row the issues belong to
        :param bugsdb: database backend used for insertion
        """
        analyzed = []
        nbugs = 0

        for bug in bugs:
            if bug.web_link in analyzed:
                continue  # for the bizarre error #338

            try:
                issue_data = self.analyze_bug(bug)
            except Exception as e:
                # BUG FIX: the original message embedded a stray quote and
                # the source indentation via a backslash continuation.
                printerr("Error in function analyzeBug with URL: %s and Bug: %s"
                         % (str(dbtrk.url), bug))
                raise e

            try:
                bugsdb.insert_issue(issue_data, dbtrk.id)
                nbugs += 1
            except UnicodeEncodeError:
                printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except Exception as e:
                printerr("Unexpected Error: the issue %s couldn't be stored"
                         % (issue_data.issue))
                print(e)

            # BUG FIX: record the processed link so the dedup check at the
            # top of the loop actually works (matches the sibling variant).
            analyzed.append(bug.web_link)  # for the bizarre error #338
예제 #51
0
파일: sf.py 프로젝트: sferdi/lucenebug
 def __parse_issue_description(self, soup):
     """Extract the description text from a SourceForge issue page.

     :param soup: BeautifulSoup tree of the issue HTML
     :return: unicode string with the issue description
     :raise SourceForgeParserError: when the expected markup is missing
     """
     try:
         # Details is a list of unicode strings, so the
         # strings are joined into a string to build the
         # description field.
         details = soup.find({'label': True},
                             text=ISSUE_DETAILS_PATTERN).findNext('p')
         desc = u''.join(details.contents)
         return desc
     except Exception:
         # BUG FIX: narrowed from a bare 'except:' which also swallowed
         # SystemExit/KeyboardInterrupt.
         printerr('Error parsing issue description')
         raise SourceForgeParserError('Error parsing issue description')
예제 #52
0
파일: jira.py 프로젝트: acs/Bicho
    def urlopen_auth(self, url):
        """Open an URL using the authenticated session cookies.

        :param url: URL to fetch
        :return: the open file-like response object
        :raise urllib2.HTTPError, urllib2.URLError: re-raised after logging
        """
        request = urllib2.Request(url)

        opener = urllib2.build_opener()

        if self.is_auth_session():
            q = [str(c) + '=' + v for c, v in self.cookies.items()]
            opener.addheaders.append(('Cookie', '; '.join(q)))

        try:
            return opener.open(request)
        except (urllib2.HTTPError, urllib2.URLError) as e:
            # BUG FIX: URLError has no 'code' attribute (only HTTPError
            # does), so the original handler itself crashed with
            # AttributeError on connection failures.
            printerr("Error code: %s, reason: %s"
                     % (getattr(e, 'code', 'n/a'), getattr(e, 'reason', e)))
            raise e
예제 #53
0
    def urlopen_auth(self, url):
        """Open an URL using the authenticated session cookies.

        :param url: URL to fetch
        :return: the open file-like response object
        :raise urllib2.HTTPError, urllib2.URLError: re-raised after logging
        """
        request = urllib2.Request(url)

        opener = urllib2.build_opener()

        if self.is_auth_session():
            q = [str(c) + '=' + v for c, v in self.cookies.items()]
            opener.addheaders.append(('Cookie', '; '.join(q)))

        try:
            return opener.open(request)
        except (urllib2.HTTPError, urllib2.URLError) as e:
            # BUG FIX: URLError has no 'code' attribute (only HTTPError
            # does), so the original handler itself crashed with
            # AttributeError on connection failures.
            printerr("Error code: %s, reason: %s"
                     % (getattr(e, 'code', 'n/a'), getattr(e, 'reason', e)))
            raise e
예제 #54
0
    def __init__(self):
        """Configure the GitHub backend from the global Config.

        Credentials come either from an OAuth token or from a
        user/password pair; when neither is available the process
        aborts with status 1.
        """
        self.url = Config.url
        self.delay = Config.delay
        self.backend_token = None
        self.backend_user = None
        self.backend_password = None

        # Sentinel distinguishes "attribute absent" from "attribute is None".
        _missing = object()
        token = getattr(Config, 'backend_token', _missing)
        user = getattr(Config, 'backend_user', _missing)
        password = getattr(Config, 'backend_password', _missing)

        if token is not _missing:
            self.backend_token = token
        elif user is not _missing and password is not _missing:
            self.backend_user = user
            self.backend_password = password
        else:
            msg = "\n--backend-user and --backend-password or --backend-token" + \
                  " are mandatory to download bugs from Github\n"
            printerr(msg)
            sys.exit(1)

        self.newest_first = Config.newest_first
        self.remaining_ratelimit = 0
예제 #55
0
파일: lp.py 프로젝트: orezpraw/Bicho
    def analyze_project_bugs(self, bugs, dbtrk, bugsdb):
        analyzed = []
        nbugs = 0

        for bug in bugs:
            if bug.web_link in analyzed:
                continue  # for the bizarre error #338

            issue_data = self.analyze_bug(bug)

            try:
                bugsdb.insert_issue(issue_data, dbtrk.id)
                nbugs += 1
            except UnicodeEncodeError:
                printerr("UnicodeEncodeError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored"
                         % (issue_data.issue))
            except Exception, e:
                printerr("Unexpected Error: the issue %s couldn't be stored"
                         % (issue_data.issue))
                print e

            analyzed.append(bug.web_link)  # for the bizarre error #338
예제 #56
0
파일: github.py 프로젝트: pombredanne/Bicho
    def __init__(self):
        """Configure the GitHub backend from the global Config.

        Credentials come either from an OAuth token or from a
        user/password pair; when neither is available the process
        aborts with status 1.
        """
        self.url = Config.url
        self.delay = Config.delay
        self.backend_token = None
        self.backend_user = None
        self.backend_password = None
        self.users = {}

        # Sentinel distinguishes "attribute absent" from "attribute is None".
        _missing = object()
        token = getattr(Config, 'backend_token', _missing)
        user = getattr(Config, 'backend_user', _missing)
        password = getattr(Config, 'backend_password', _missing)

        if token is not _missing:
            self.backend_token = token
        elif user is not _missing and password is not _missing:
            self.backend_user = user
            self.backend_password = password
        else:
            msg = "\n--backend-user and --backend-password or --backend-token" + \
                  " are mandatory to download bugs from Github\n"
            printerr(msg)
            sys.exit(1)

        self.newest_first = Config.newest_first
        self.remaining_ratelimit = 0
예제 #57
0
    def run(self):
        """
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        issues_per_query = 250
        start_issue = 1

        bugs = []
        bugsdb = get_database(DBGoogleCodeBackend())

        # still useless
        bugsdb.insert_supported_traker("googlecode", "beta")
        trk = Tracker(Config.url, "googlecode", "beta")

        dbtrk = bugsdb.insert_tracker(trk)

        self.url = Config.url

        #  https://code.google.com/feeds/issues/p/mobile-time-care
        self.url_issues = Config.url + "/issues/full?max-results=1"
        printdbg("URL for getting metadata " + self.url_issues)

        d = feedparser.parse(self.url_issues)

        total_issues = int(d['feed']['opensearch_totalresults'])
        print "Total bugs: ", total_issues
        if total_issues == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)
        remaining = total_issues

        print "ETA ", (total_issues * Config.delay) / (60), "m (", (
            total_issues * Config.delay) / (60 * 60), "h)"

        while start_issue < total_issues:
            self.url_issues = Config.url + "/issues/full?max-results=" + str(
                issues_per_query)
            self.url_issues += "&start-index=" + str(start_issue)

            printdbg("URL for next issues " + self.url_issues)

            d = feedparser.parse(self.url_issues)

            for entry in d['entries']:
                try:
                    issue = self.analyze_bug(entry)
                    if issue is None:
                        continue
                    bugsdb.insert_issue(issue, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (
                        remaining) * Config.delay / 60, "m", " issues ", str(
                            remaining)
                    time.sleep(Config.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug ")
                    pprint.pprint(entry)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored" %
                        (issue.issue))
예제 #58
0
    def run(self):
        """
        """
        printout("Running Bicho with delay of %s seconds" % (str(self.delay)))

        # limit=-1 is NOT recognized as 'all'.  500 is a reasonable limit. - allura code
        issues_per_query = 500
        start_page = 0

        bugs = []
        bugsdb = get_database(DBAlluraBackend())

        # still useless in allura
        bugsdb.insert_supported_traker("allura", "beta")
        trk = Tracker(Config.url, "allura", "beta")
        dbtrk = bugsdb.insert_tracker(trk)

        last_mod_date = bugsdb.get_last_modification_date()

        # Date before the first ticket
        time_window_start = "1900-01-01T00:00:00Z"
        time_window_end = datetime.now().isoformat() + "Z"

        if last_mod_date:
            time_window_start = last_mod_date
            printdbg("Last bugs analyzed were modified on: %s" % last_mod_date)

        time_window = time_window_start + " TO  " + time_window_end

        self.url_issues = Config.url + "/search/?limit=1"
        self.url_issues += "&q="
        # A time range with all the tickets
        self.url_issues += urllib.quote("mod_date_dt:[" + time_window + "]")
        printdbg("URL for getting metadata " + self.url_issues)

        f = urllib.urlopen(self.url_issues)
        ticketTotal = json.loads(f.read())

        total_issues = int(ticketTotal['count'])
        total_pages = total_issues / issues_per_query
        print("Number of tickets: " + str(total_issues))

        if total_issues == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)
        remaining = total_issues

        print "ETA ", (total_issues * Config.delay) / (60), "m (", (
            total_issues * Config.delay) / (60 * 60), "h)"

        while start_page <= total_pages:
            self.url_issues = Config.url + "/search/?limit=" + str(
                issues_per_query)
            self.url_issues += "&page=" + str(start_page) + "&q="
            # A time range with all the tickets
            self.url_issues += urllib.quote("mod_date_dt:[" + time_window +
                                            "]")
            # Order by mod_date_dt desc
            self.url_issues += "&sort=mod_date_dt+asc"

            printdbg("URL for next issues " + self.url_issues)

            f = urllib.urlopen(self.url_issues)

            ticketList = json.loads(f.read())

            bugs = []
            for ticket in ticketList["tickets"]:
                bugs.append(ticket["ticket_num"])

            for bug in bugs:
                try:
                    issue_url = Config.url + "/" + str(bug)
                    issue_data = self.analyze_bug(issue_url)
                    if issue_data is None:
                        continue
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                    remaining -= 1
                    print "Remaining time: ", (
                        remaining) * Config.delay / 60, "m"
                    time.sleep(self.delay)
                except Exception, e:
                    printerr("Error in function analyze_bug " + issue_url)
                    traceback.print_exc(file=sys.stdout)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored" %
                        (issue_data.issue))
예제 #59
0
파일: github.py 프로젝트: pombredanne/Bicho
    def run(self):
        """Fetch every issue of the configured GitHub tracker and store it.

        Pages through the GitHub v3 issue API (self.pagecont is the page
        cursor), resuming incrementally from the newest modification date
        already cached in the database.  Exits the process when the API
        rate limit is reached or when there is nothing to fetch.
        """
        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        bugsdb = get_database(DBGithubBackend())

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        printdbg(url)

        bugsdb.insert_supported_traker("github", "v3")
        trk = Tracker(url, "github", "v3")
        dbtrk = bugsdb.insert_tracker(trk)

        self.bugs_state = ALL_STATES
        self.pagecont = 1
        self.mod_date = None

        # Resume from the most recent issue already stored for this tracker.
        aux_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

        if aux_date:
            self.mod_date = aux_date.isoformat()
            printdbg("Last issue already cached: %s" % self.mod_date)

        try:
            bugs = self.__get_batch_bugs()
        except GitHubRateLimitReached:
            printout(
                "GitHub rate limit reached. To resume, wait some minutes.")
            sys.exit(0)

        nbugs = len(bugs)

        # An empty first batch means either the cache is current (when we
        # resumed from a date) or the tracker has no issues at all.
        if len(bugs) == 0:
            if aux_date:
                printout("Bicho database up to date")
            else:
                printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        # NOTE(review): auxcont is never used afterwards, and nbugs is
        # accumulated but never reported -- both look like leftovers.
        auxcont = 0
        while len(bugs) > 0:

            for bug in bugs:
                try:
                    issue_data = self.analyze_bug(bug)
                except GitHubRateLimitReached:
                    printout(
                        "GitHub rate limit reached. To resume, wait some minutes."
                    )
                    sys.exit(0)
                except Exception:
                    #FIXME it does not handle the e
                    msg = "Error in function analyzeBug with URL: %s and bug: %s" % (
                        url, bug)
                    printerr(msg)
                    raise

                try:
                    # we can have meta-trackers but we want to have the
                    # original tracker name
                    tr_url = self.__get_tracker_url_from_bug(bug)
                    if (tr_url != url):
                        aux_trk = Tracker(tr_url, "github", "v3")
                        dbtrk = bugsdb.insert_tracker(aux_trk)
                    bugsdb.insert_issue(issue_data, dbtrk.id)
                except UnicodeEncodeError:
                    printerr(
                        "UnicodeEncodeError: the issue %s couldn't be stored" %
                        (issue_data.issue))
                # NOTE(review): generic handler silently logs and continues;
                # consider narrowing the exception type.
                except Exception, e:
                    printerr("ERROR: ")
                    print e

                printdbg("Getting ticket number " + str(bug["number"]))
                time.sleep(self.delay)

            # Advance the API page cursor and fetch the next batch.
            self.pagecont += 1

            try:
                bugs = self.__get_batch_bugs()
            except GitHubRateLimitReached:
                printout(
                    "GitHub rate limit reached. To resume, wait some minutes.")
                sys.exit(0)

            nbugs = nbugs + len(bugs)
예제 #60
0
    def run(self):

        print("Running Bicho with delay of %s seconds" % (str(self.delay)))

        url = self.url
        pname = None
        pname = self.__get_project_from_url()

        bugsdb = get_database(DBLaunchpadBackend())

        printdbg(url)

        # launchpad needs a temp directory to store cached data
        homedir = pwd.getpwuid(os.getuid()).pw_dir
        cachedir = os.path.join(homedir, ".cache/bicho/")
        if not os.path.exists(cachedir):
            os.makedirs(cachedir)
        cre_file = os.path.join(cachedir + 'launchpad-credential')
        self.lp = Launchpad.login_with('Bicho',
                                       'production',
                                       credentials_file=cre_file)

        aux_status = [
            "New", "Incomplete", "Opinion", "Invalid", "Won't Fix", "Expired",
            "Confirmed", "Triaged", "In Progress", "Fix Committed",
            "Fix Released", "Incomplete (with response)",
            "Incomplete (without response)"
        ]

        # still useless
        bugsdb.insert_supported_traker("launchpad", "x.x")
        trk = Tracker(url, "launchpad", "x.x")
        dbtrk = bugsdb.insert_tracker(trk)

        last_mod_date = bugsdb.get_last_modification_date(tracker_id=dbtrk.id)

        if last_mod_date:
            bugs = self.lp.projects[pname].searchTasks(
                status=aux_status,
                omit_duplicates=False,
                order_by='date_last_updated',
                modified_since=last_mod_date)
        else:
            bugs = self.lp.projects[pname].searchTasks(
                status=aux_status,
                omit_duplicates=False,
                order_by='date_last_updated')
        printdbg("Last bug already cached: %s" % last_mod_date)

        nbugs = len(bugs)

        if nbugs == 0:
            printout("No bugs found. Did you provide the correct url?")
            sys.exit(0)

        analyzed = []

        for bug in bugs:

            if bug.web_link in analyzed:
                continue  # for the bizarre error #338

            try:
                issue_data = self.analyze_bug(bug)
            except Exception:
                #FIXME it does not handle the e
                printerr("Error in function analyzeBug with URL: ' \
                '%s and Bug: %s" % (url, bug))
                raise

            try:
                # we can have meta-trackers but we want to have the original
                #tracker name
                tr_url = self.__get_tracker_url_from_bug(bug)
                if (tr_url != url):
                    aux_trk = Tracker(tr_url, "launchpad", "x.x")
                    dbtrk = bugsdb.insert_tracker(aux_trk)
                bugsdb.insert_issue(issue_data, dbtrk.id)
            except UnicodeEncodeError:
                printerr(
                    "UnicodeEncodeError: the issue %s couldn't be stored" %
                    (issue_data.issue))
            except NotFoundError:
                printerr("NotFoundError: the issue %s couldn't be stored" %
                         (issue_data.issue))
            except Exception, e:
                printerr("Unexpected Error: the issue %s couldn't be stored" %
                         (issue_data.issue))
                print e

            analyzed.append(bug.web_link)  # for the bizarre error #338
            time.sleep(self.delay)