Exemplo n.º 1
0
 def search(query, stats):
     """
     Perform issue search for given stats instance.

     Fetches issues from the Jira REST API in batches of MAX_RESULTS
     (at most MAX_BATCHES requests) and returns them wrapped as Issue
     objects. Raises ReportError when the server reports a failure.
     """
     log.debug("Search query: {0}".format(query))
     issues = []
     # Fetch data from the server in batches of MAX_RESULTS issues
     for batch in range(MAX_BATCHES):
         response = stats.parent.session.get(
             "{0}/rest/api/latest/search?{1}".format(
                 stats.parent.url, urllib.parse.urlencode({
                     "jql": query,
                     "fields": "summary,comment",
                     "maxResults": MAX_RESULTS,
                     "startAt": batch * MAX_RESULTS})))
         # Check the http status before touching the body: a failed
         # request may return a non-json error page, which would raise
         # an unrelated decode error and mask the real problem
         if not response.ok:
             try:
                 error = " ".join(response.json()["errorMessages"])
             except (KeyError, ValueError):
                 error = "unknown"
             raise ReportError(
                 f"Failed to fetch jira issues for query '{query}'. "
                 f"The reason was '{response.reason}' "
                 f"and the error was '{error}'.")
         data = response.json()
         log.debug("Batch {0} result: {1} fetched".format(
             batch, listed(data["issues"], "issue")))
         log.data(pretty(data))
         issues.extend(data["issues"])
         # If all issues fetched, we're done
         if len(issues) >= data["total"]:
             break
     # Return the list of issue objects
     return [Issue(issue, prefix=stats.parent.prefix) for issue in issues]
Exemplo n.º 2
0
 def search(query, stats):
     """ Perform issue search for given stats instance """
     log.debug("Search query: {0}".format(query))
     issues = []
     # Issues are fetched in chunks of MAX_RESULTS, with an upper
     # bound of MAX_BATCHES requests to the server
     for batch in range(MAX_BATCHES):
         query_string = urllib.urlencode({
             "jql": query,
             "fields": "summary,comment",
             "maxResults": MAX_RESULTS,
             "startAt": batch * MAX_RESULTS})
         result = stats.parent.session.open(
             "{0}/rest/api/latest/search?{1}".format(
                 stats.parent.url, query_string))
         data = json.loads(result.read())
         log.debug("Batch {0} result: {1} fetched".format(
             batch, listed(data["issues"], "issue")))
         log.data(pretty(data))
         issues.extend(data["issues"])
         # Stop as soon as the server has no more issues for us
         if len(issues) >= data["total"]:
             break
     # Wrap the raw issue data into Issue objects
     return [Issue(issue, prefix=stats.parent.prefix) for issue in issues]
Exemplo n.º 3
0
    def search(query, stats, expand=None):
        """ Perform page/comment search for given stats instance """
        log.debug("Search query: {0}".format(query))
        content = []

        # Walk through the result pages, MAX_RESULTS objects at a time
        for batch in range(MAX_BATCHES):
            params = urllib.parse.urlencode({
                "cql": query,
                "limit": MAX_RESULTS,
                "expand": expand,
                "start": batch * MAX_RESULTS})
            response = stats.parent.session.get(
                "{0}/rest/api/content/search?{1}".format(
                    stats.parent.url, params))
            data = response.json()
            log.debug("Batch {0} result: {1} fetched".format(
                batch, listed(data["results"], "object")))
            log.data(pretty(data))
            content.extend(data["results"])
            # A missing 'next' link means this was the last page
            if data['_links'].get('next') is None:
                break
        return content
Exemplo n.º 4
0
    def search(self, query):
        """
        Perform GitHub query.

        Follows the 'next' links from the response headers to fetch
        all result pages and returns the accumulated list of items.
        Raises ReportError when the request or response parsing fails.
        """
        result = []
        url = self.url + "/" + query + f"&per_page={PER_PAGE}"

        while True:
            # Fetch the query
            log.debug(f"GitHub query: {url}")
            try:
                response = requests.get(url, headers=self.headers)
                log.debug(f"Response headers:\n{response.headers}")
            except requests.exceptions.RequestException as error:
                log.debug(error)
                raise ReportError(f"GitHub search on {self.url} failed.")

            # Parse fetched json data. Catch the stdlib json error:
            # json.loads() raises json.JSONDecodeError, which the
            # requests.exceptions.JSONDecodeError subclass would NOT
            # catch (that one covers response.json() calls only).
            # KeyError handles a response without the 'items' key.
            try:
                data = json.loads(response.text)["items"]
                result.extend(data)
            except (json.JSONDecodeError, KeyError) as error:
                log.debug(error)
                raise ReportError(f"GitHub JSON failed: {response.text}.")

            # Update url to the next page, break if no next page provided
            if 'next' in response.links:
                url = response.links['next']['url']
            else:
                break

        log.debug("Result: {0} fetched".format(listed(len(result), "item")))
        log.data(pretty(result))
        return result
Exemplo n.º 5
0
Arquivo: trello.py Projeto: psss/did
    def __init__(self, option, name=None, parent=None, user=None):
        """ Read config for given option and build the list of stats """
        name = "Trello updates for {0}".format(option)
        super(TrelloStatsGroup, self).__init__(
            option=option, name=name, parent=parent, user=user)

        # Mapping of Trello action types to their stats classes
        filter_map = {
            'Boards': {},
            'Lists': {},
            'Cards': {
                'commentCard': TrelloCardsCommented,
                'updateCard': TrelloCardsUpdated,
                'updateCard:closed': TrelloCardsClosed,
                'updateCard:idList': TrelloCardsMoved,
                'createCard': TrelloCardsCreated},
            'Checklists': {
                'updateCheckItemStateOnCard': TrelloCheckItem},
        }
        self._session = None
        self.url = "https://trello.com/1"
        config = dict(Config().section(option))

        # Either both api credentials or a user name must be configured
        positional_args = ['apikey', 'token']
        if "user" not in config and not set(positional_args) <= set(config):
            raise ReportError(
                "No ({0}) or 'user' set in the [{1}] section".format(
                    listed(positional_args, quote="'"), option))

        # Provide empty-string defaults for the optional settings
        for arg in ("board_links", "apikey", "token"):
            config.setdefault(arg, "")

        # Skip API instance initialization when building options
        trello = None if self.options is None else TrelloAPI(
            stats=self, config=config)

        # Pick configured filters, falling back to the default set
        if "filters" in config:
            filters = split(config["filters"])
        else:
            filters = DEFAULT_FILTERS

        # Instantiate a stats object for every selected filter
        for group in sorted(filter_map):
            for action in sorted(filter_map[group]):
                if filters != [""] and action not in filters:
                    continue
                stat_class = filter_map[group][action]
                self.stats.append(stat_class(
                    trello=trello, filt=action,
                    option="{0}-{1}".format(option, action),
                    parent=self))
Exemplo n.º 6
0
def test_listed():
    """ Check listed() formatting for various kinds of input """
    from did.utils import listed
    assert listed
    # Plain ranges are joined with 'and'
    assert listed(range(1)) == "0"
    assert listed(range(2)) == "0 and 1"
    # Items can optionally be quoted
    assert listed(range(3), quote='"') == '"0", "1" and "2"'
    # The 'max' option limits how many items are spelled out
    assert listed(range(4), max=3) == "0, 1, 2 and 1 more"
    assert listed(range(5), 'number', max=3) == "0, 1, 2 and 2 more numbers"
    # With a noun given this longer list collapses into a count
    assert listed(range(6), 'category') == "6 categories"
    # A plain count works too, with custom singular/plural forms
    assert listed(7, "leaf", "leaves") == "7 leaves"
    assert listed([], "item", max=0) == "0 items"
Exemplo n.º 7
0
def test_listed():
    """ Verify listed() output for a range of inputs """
    from did.utils import listed
    assert listed
    # Each case: positional args, keyword args, expected formatting
    cases = [
        ((range(1),), {}, "0"),
        ((range(2),), {}, "0 and 1"),
        ((range(3),), {"quote": '"'}, '"0", "1" and "2"'),
        ((range(4),), {"max": 3}, "0, 1, 2 and 1 more"),
        ((range(5), 'number'), {"max": 3}, "0, 1, 2 and 2 more numbers"),
        ((range(6), 'category'), {}, "6 categories"),
        ((7, "leaf", "leaves"), {}, "7 leaves"),
        (([], "item"), {"max": 0}, "0 items"),
    ]
    for args, kwargs, expected in cases:
        assert listed(*args, **kwargs) == expected
Exemplo n.º 8
0
Arquivo: trello.py Projeto: psss/did
    def __init__(self, option, name=None, parent=None, user=None):
        """ Process config for given option and initialize stats """
        name = "Trello updates for {0}".format(option)
        super(TrelloStatsGroup, self).__init__(
            option=option, name=name, parent=parent, user=user)

        # map appropriate API methods to Classes
        filter_map = {
            'Boards': {},
            'Lists': {},
            'Cards': {
                'commentCard': TrelloCardsCommented,
                'updateCard': TrelloCardsUpdated,
                'updateCard:closed': TrelloCardsClosed,
                'updateCard:idList': TrelloCardsMoved,
                'createCard': TrelloCardsCreated},
            'Checklists': {
                'updateCheckItemStateOnCard': TrelloCheckItem}
        }
        self._session = None
        self.url = "https://trello.com/1"
        config = dict(Config().section(option))

        # Either both api credentials or a user name must be configured
        positional_args = ['apikey', 'token']
        if (not set(positional_args).issubset(set(config.keys()))
                and "user" not in config):
            raise ReportError(
                "No ({0}) or 'user' set in the [{1}] section".format(
                    listed(positional_args, quote="'"), option))

        # Fill in empty defaults for settings the user did not provide
        optional_args = ["board_links", "apikey", "token"]
        for arg in optional_args:
            if arg not in config:
                config[arg] = ""

        # Skip API instance initialization when building options
        if self.options is None:
            trello = None
        else:
            trello = TrelloAPI(stats=self, config=config)

        # Use configured filters, fall back to the default set
        try:
            filters = split(config["filters"])
        except KeyError:
            filters = DEFAULT_FILTERS
        # Create a stats object for each selected filter
        for filt_group in filter_map:
            for filt in filter_map[filt_group]:
                if filters != [""] and filt not in filters:
                    continue
                self.stats.append(filter_map[filt_group][filt](
                    trello=trello,
                    filt=filt,
                    option=option + "-" + filt,
                    parent=self))
Exemplo n.º 9
0
 def search(self, user, since, until, target_type, action_name):
     """ Perform GitLab query """
     # Resolve the user and their events lazily, caching on the instance
     if not self.user:
         self.user = self.get_user(user)
     if not self.events:
         self.events = self.user_events(self.user['id'], since, until)
     result = []
     for event in self.events:
         when = dateutil.parser.parse(event['created_at']).date()
         # Keep events of the requested kind within the date range
         matches_kind = (event['target_type'] == target_type
                         and event['action_name'] == action_name)
         if matches_kind and since.date <= when <= until.date:
             result.append(event)
     log.debug("Result: {0} fetched".format(listed(len(result), "item")))
     return result
Exemplo n.º 10
0
Arquivo: gitlab.py Projeto: psss/did
 def search(self, user, since, until, target_type, action_name):
     """ Perform GitLab query """
     # Fetch user info and event list lazily, cache on the instance
     if not self.user:
         self.user = self.get_user(user)
     if not self.events:
         self.events = self.user_events(self.user['id'], since, until)

     def relevant(event):
         # Event must match the requested type/action and date range
         day = dateutil.parser.parse(event['created_at']).date()
         return (event['target_type'] == target_type and
                 event['action_name'] == action_name and
                 since.date <= day and until.date >= day)

     result = [event for event in self.events if relevant(event)]
     log.debug("Result: {0} fetched".format(listed(len(result), "item")))
     return result
Exemplo n.º 11
0
 def search(self, query):
     """
     Perform GitHub query.

     Builds the request from the base url and returns the list of
     items parsed from the json response. Raises ReportError when
     the search request fails.
     """
     url = self.url + "/" + query
     log.debug("GitHub query: {0}".format(url))
     try:
         request = urllib.request.Request(url, headers=self.headers)
         # Use a context manager so the http response is always closed
         # (the original leaked the connection)
         with urllib.request.urlopen(request) as response:
             log.debug("Response headers:\n{0}".format(
                 str(response.info()).strip()))
             result = json.loads(response.read())["items"]
     except urllib.error.URLError as error:
         log.debug(error)
         raise ReportError("GitHub search on {0} failed.".format(self.url))
     log.debug("Result: {0} fetched".format(listed(len(result), "item")))
     log.data(pretty(result))
     return result
Exemplo n.º 12
0
Arquivo: github.py Projeto: barraq/did
 def search(self, query):
     """ Perform GitHub query """
     url = self.url + "/" + query
     log.debug("GitHub query: {0}".format(url))
     try:
         # Build and send the request with the custom headers attached
         github_request = urllib2.Request(url, headers=self.headers)
         response = urllib2.urlopen(github_request)
         log.debug("Response headers:\n{0}".format(
             unicode(response.info()).strip()))
     except urllib2.URLError as error:
         # Network or http failure, report the problem to the user
         log.debug(error)
         raise ReportError(
             "GitHub search on {0} failed.".format(self.url))
     # Pick the item list from the parsed json response
     items = json.loads(response.read())["items"]
     log.debug("Result: {0} fetched".format(listed(len(items), "item")))
     log.data(pretty(items))
     return items
Exemplo n.º 13
0
Arquivo: bodhi.py Projeto: psss/did
 def search(self, query):
     """
     Perform Bodhi query.

     Iterates over all result pages and returns the accumulated
     list of update objects.
     """
     result = []
     current_page = 1
     original_query = query
     # Create the client once, it can be reused for all page requests
     # (the original recreated it on every loop iteration)
     client = BodhiClient(self.url)
     while current_page:
         log.debug("Bodhi query: {0}".format(query))
         data = client.send_request(query, verb='GET')
         objects = data['updates']
         log.debug("Result: {0} fetched".format(listed(
             len(objects), "item")))
         log.data(pretty(data))
         result.extend(objects)
         # Advance to the next page, or stop when the last one is done
         if current_page < data['pages']:
             current_page = current_page + 1
             query = f"{original_query}&page={current_page}"
         else:
             current_page = None
     return result
Exemplo n.º 14
0
 def search(self, query, pagination, result_field):
     """ Perform Pagure query """
     collected = []
     url = "/".join((self.url, query))
     # Follow the pagination links until the last page is reached
     while url:
         log.debug("Pagure query: {0}".format(url))
         try:
             response = requests.get(url, headers=self.headers)
             log.data("Response headers:\n{0}".format(response.headers))
         except requests.RequestException as error:
             log.error(error)
             raise ReportError("Pagure search {0} failed.".format(self.url))
         payload = response.json()
         page_items = payload[result_field]
         log.debug("Result: {0} fetched".format(
             listed(len(page_items), "item")))
         log.data(pretty(payload))
         # FIXME later: Work around https://pagure.io/pagure/issue/4057
         if not page_items:
             break
         collected.extend(page_items)
         url = payload[pagination]['next']
     return collected
Exemplo n.º 15
0
 def search(query, stats):
     """ Perform issue search for given stats instance """
     log.debug("Search query: {0}".format(query))
     issues = []
     # Fetch data from the server in batches of MAX_RESULTS issues
     # (python 2 style variant: urllib.urlencode plus session.open)
     for batch in range(MAX_BATCHES):
         result = stats.parent.session.open(
             "{0}/rest/api/latest/search?{1}".format(
                 stats.parent.url, urllib.urlencode({
                     "jql": query,
                     "fields": "summary,comment",
                     "maxResults": MAX_RESULTS,
                     "startAt": batch * MAX_RESULTS})))
         # Parse the raw response body as json
         data = json.loads(result.read())
         log.debug("Batch {0} result: {1} fetched".format(
             batch, listed(data["issues"], "issue")))
         log.data(pretty(data))
         issues.extend(data["issues"])
         # If all issues fetched, we're done
         if len(issues) >= data["total"]:
             break
     # Return the list of issue objects
     return [Issue(issue, prefix=stats.parent.prefix) for issue in issues]
Exemplo n.º 16
0
Arquivo: pagure.py Projeto: psss/did
 def search(self, query, pagination, result_field):
     """ Perform Pagure query """
     result = []
     url = "/".join((self.url, query))
     # Follow pagination until the last page ('next' url becomes None)
     while url:
         log.debug("Pagure query: {0}".format(url))
         try:
             response = requests.get(url, headers=self.headers)
             log.data("Response headers:\n{0}".format(response.headers))
         except requests.RequestException as error:
             log.error(error)
             raise ReportError("Pagure search {0} failed.".format(self.url))
         data = response.json()
         # Pick the requested field from the response payload
         objects = data[result_field]
         log.debug("Result: {0} fetched".format(
             listed(len(objects), "item")))
         log.data(pretty(data))
         # FIXME later: Work around https://pagure.io/pagure/issue/4057
         if not objects:
             break
         result.extend(objects)
         url = data[pagination]['next']
     return result
Exemplo n.º 17
0
 def _fetch_activities(self):
     """
     Get organization activity, handle pagination.

     Returns activities created before the 'until' date, stopping
     as soon as a record older than the 'since' date is found
     (both dates are taken from self.stats.options).
     """
     activities = []
     # Prepare url of the first page
     url = '{0}/organizations/{1}/activity/'.format(
         self.url, self.organization)
     while url:
         # Fetch one page of activities
         try:
             log.debug('Fetching activity data: {0}'.format(url))
             response = requests.get(url, headers=self.headers)
             if not response.ok:
                 log.error(response.text)
                 raise ReportError('Failed to fetch Sentry activities.')
             data = response.json()
             log.data("Response headers:\n{0}".format(
                 pretty(response.headers)))
             log.debug("Fetched {0}.".format(listed(len(data), 'activity')))
             log.data(pretty(data))
             for activity in [Activity(item) for item in data]:
                 # We've reached the last page, older records not relevant
                 if activity.created < self.stats.options.since.date:
                     return activities
                 # Store only relevant activities (before until date)
                 if activity.created < self.stats.options.until.date:
                     log.details("Activity: {0}".format(activity))
                     activities.append(activity)
         except requests.RequestException as error:
             log.debug(error)
             raise ReportError(
                 'Failed to fetch Sentry activities from {0}'.format(url))
         # Check for possible next page: NEXT_PAGE regex is applied to
         # the 'Link' header; on the last page search() returns None,
         # the AttributeError then ends the pagination loop
         try:
             url = NEXT_PAGE.search(response.headers['Link']).groups()[0]
         except AttributeError:
             url = None
     return activities