def search(query, stats, expand=None):
    """ Perform page/comment search for given stats instance """
    log.debug("Search query: {0}".format(query))
    content = []
    # Fetch data from the server in batches of MAX_RESULTS results
    for batch in range(MAX_BATCHES):
        response = stats.parent.session.get(
            "{0}/rest/api/content/search?{1}".format(
                stats.parent.url, urllib.parse.urlencode({
                    "cql": query,
                    "limit": MAX_RESULTS,
                    "expand": expand,
                    "start": batch * MAX_RESULTS})))
        data = response.json()
        log.debug("Batch {0} result: {1} fetched".format(
            batch, listed(data["results"], "object")))
        log.data(pretty(data))
        content.extend(data["results"])
        # If all results fetched, we're done
        if data['_links'].get('next') is None:
            break
    return content
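# A minimal standalone sketch of the same batched CQL search; the base url,
# function name, and limits below are placeholders (assumptions), and plain
# requests stands in for the authenticated stats session used above.
import urllib.parse

import requests

def fetch_all_content(base_url, cql, limit=50, max_batches=100):
    """ Follow Confluence search batches until no 'next' link remains """
    content = []
    for batch in range(max_batches):
        response = requests.get(
            "{0}/rest/api/content/search?{1}".format(
                base_url, urllib.parse.urlencode({
                    "cql": cql,
                    "limit": limit,
                    "start": batch * limit})))
        data = response.json()
        content.extend(data["results"])
        # The '_links.next' key disappears on the last page
        if data["_links"].get("next") is None:
            break
    return content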
def search(query, stats):
    """ Perform issue search for given stats instance """
    log.debug("Search query: {0}".format(query))
    issues = []
    # Fetch data from the server in batches of MAX_RESULTS issues
    for batch in range(MAX_BATCHES):
        result = stats.parent.session.open(
            "{0}/rest/api/latest/search?{1}".format(
                stats.parent.url, urllib.urlencode({
                    "jql": query,
                    "fields": "summary,comment",
                    "maxResults": MAX_RESULTS,
                    "startAt": batch * MAX_RESULTS})))
        data = json.loads(result.read())
        log.debug("Batch {0} result: {1} fetched".format(
            batch, listed(data["issues"], "issue")))
        log.data(pretty(data))
        issues.extend(data["issues"])
        # If all issues fetched, we're done
        if len(issues) >= data["total"]:
            break
    # Return the list of issue objects
    return [Issue(issue, prefix=stats.parent.prefix) for issue in issues]
def get_actions(self, filters, since=None, before=None, limit=1000):
    """
    Example of data structure:
    https://api.trello.com/1/members/ben/actions?limit=2
    """
    if limit > 1000:
        raise NotImplementedError(
            "Fetching more than 1000 items is not implemented")
    resp = self.stats.session.open(
        "{0}/members/{1}/actions?{2}".format(
            self.stats.url, self.username, urllib.parse.urlencode({
                "key": self.key,
                "token": self.token,
                "filter": filters,
                "limit": limit,
                "since": str(since),
                "before": str(before)})))
    actions = json.loads(resp.read())
    log.data(pretty(actions))
    # Keep only actions from the configured boards
    actions = [
        act for act in actions
        if act['data']['board']['id'] in self.board_ids]
    return actions
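# Standalone sketch of the same member-actions request, assuming the public
# Trello REST API; the username, key, and token are placeholders you would
# supply from your own account.
import json
import urllib.parse
import urllib.request

def trello_actions(username, key, token, filters="all", limit=10):
    """ Fetch recent actions for the given Trello member """
    url = "https://api.trello.com/1/members/{0}/actions?{1}".format(
        username, urllib.parse.urlencode({
            "key": key,
            "token": token,
            "filter": filters,
            "limit": limit}))
    with urllib.request.urlopen(url) as response:
        return json.loads(response.read())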
def search(self, query):
    """ Perform GitHub query """
    result = []
    url = self.url + "/" + query + f"&per_page={PER_PAGE}"
    while True:
        # Fetch the query
        log.debug(f"GitHub query: {url}")
        try:
            response = requests.get(url, headers=self.headers)
            log.debug(f"Response headers:\n{response.headers}")
        except requests.exceptions.RequestException as error:
            log.debug(error)
            raise ReportError(f"GitHub search on {self.url} failed.")
        # Parse fetched json data (response.json() raises the requests
        # JSONDecodeError caught below, plain json.loads() would not)
        try:
            data = response.json()["items"]
            result.extend(data)
        except requests.exceptions.JSONDecodeError as error:
            log.debug(error)
            raise ReportError(f"GitHub JSON failed: {response.text}.")
        # Update url to the next page, break if no next page provided
        if 'next' in response.links:
            url = response.links['next']['url']
        else:
            break
    log.debug("Result: {0} fetched".format(listed(len(result), "item")))
    log.data(pretty(result))
    return result
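# Standalone sketch of the Link-header pagination used above, assuming the
# public GitHub search API; the endpoint and query are illustrative and no
# authentication token is sent.
import requests

def github_search_all(query, per_page=100):
    """ Collect all pages of a GitHub search by following Link headers """
    url = "https://api.github.com/search/issues"
    params = {"q": query, "per_page": per_page}
    items = []
    while url:
        response = requests.get(url, params=params)
        response.raise_for_status()
        items.extend(response.json()["items"])
        # requests parses the Link header into response.links;
        # the 'next' key disappears on the last page
        url = response.links.get("next", {}).get("url")
        params = None  # the next url already carries all parameters
    return items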
def get_actions(self, filters, since=None, before=None, limit=1000):
    """
    Example of data structure:
    https://api.trello.com/1/members/ben/actions?limit=2
    """
    if limit > 1000:
        raise NotImplementedError(
            "Fetching more than 1000 items is not implemented")
    resp = self.stats.session.open(
        "{0}/members/{1}/actions?{2}".format(
            self.stats.url, self.username, urllib.urlencode({
                "key": self.key,
                "token": self.token,
                "filter": filters,
                "limit": limit,
                "since": str(since),
                "before": str(before)})))
    actions = json.loads(resp.read())
    log.data(pretty(actions))
    # Keep only actions from the configured boards
    actions = [
        act for act in actions
        if act['data']['board']['id'] in self.board_ids]
    return actions
def search(query, stats):
    """ Perform issue search for given stats instance """
    log.debug("Search query: {0}".format(query))
    issues = []
    # Fetch data from the server in batches of MAX_RESULTS issues
    for batch in range(MAX_BATCHES):
        response = stats.parent.session.get(
            "{0}/rest/api/latest/search?{1}".format(
                stats.parent.url, urllib.parse.urlencode({
                    "jql": query,
                    "fields": "summary,comment",
                    "maxResults": MAX_RESULTS,
                    "startAt": batch * MAX_RESULTS})))
        data = response.json()
        if not response.ok:
            try:
                error = " ".join(data["errorMessages"])
            except KeyError:
                error = "unknown"
            raise ReportError(
                f"Failed to fetch jira issues for query '{query}'. "
                f"The reason was '{response.reason}' "
                f"and the error was '{error}'.")
        log.debug("Batch {0} result: {1} fetched".format(
            batch, listed(data["issues"], "issue")))
        log.data(pretty(data))
        issues.extend(data["issues"])
        # If all issues fetched, we're done
        if len(issues) >= data["total"]:
            break
    # Return the list of issue objects
    return [Issue(issue, prefix=stats.parent.prefix) for issue in issues]
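# Minimal standalone sketch of the startAt/maxResults offset pagination used
# by the Jira search above; the base url is a placeholder and plain requests
# calls stand in for the authenticated session.
import urllib.parse

import requests

def jira_search_all(base_url, jql, page_size=50, max_batches=100):
    """ Fetch issues in batches until the reported total is reached """
    issues = []
    for batch in range(max_batches):
        response = requests.get(
            "{0}/rest/api/latest/search?{1}".format(
                base_url, urllib.parse.urlencode({
                    "jql": jql,
                    "maxResults": page_size,
                    "startAt": batch * page_size})))
        data = response.json()
        issues.extend(data["issues"])
        # The server reports the overall total with every batch
        if len(issues) >= data["total"]:
            break
    return issues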
def search(self, user, since, until, target_type, action_name):
    """ Perform GitLab query """
    if not self.user:
        self.user = self.get_user(user)
    if not self.events:
        self.events = self.user_events(self.user['id'], since)
    result = []
    for event in self.events:
        created_at = dateutil.parser.parse(event['created_at']).date()
        if (event['target_type'] == target_type
                and event['action_name'] == action_name
                and since.date <= created_at <= until.date):
            result.append(event)
    log.debug("Result: {0} fetched".format(listed(len(result), "item")))
    log.data(pretty(result))
    return result
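# Self-contained sketch of the date-window filter applied above, with a
# hypothetical event dictionary; dateutil is assumed available as in the
# code above, and plain date objects stand in for the stats date wrappers.
import datetime

import dateutil.parser

def filter_events(events, since, until, target_type, action_name):
    """ Keep events of the given type/action created within [since, until] """
    result = []
    for event in events:
        created_at = dateutil.parser.parse(event["created_at"]).date()
        if (event["target_type"] == target_type
                and event["action_name"] == action_name
                and since <= created_at <= until):
            result.append(event)
    return result

events = [{
    "created_at": "2023-05-02T10:00:00Z",
    "target_type": "MergeRequest",
    "action_name": "opened"}]
print(filter_events(
    events, datetime.date(2023, 5, 1), datetime.date(2023, 5, 31),
    "MergeRequest", "opened"))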
def search(self, query):
    """ Perform GitHub query """
    url = self.url + "/" + query
    log.debug("GitHub query: {0}".format(url))
    try:
        request = urllib.request.Request(url, headers=self.headers)
        response = urllib.request.urlopen(request)
        log.debug("Response headers:\n{0}".format(
            str(response.info()).strip()))
    except urllib.error.URLError as error:
        log.debug(error)
        raise ReportError("GitHub search on {0} failed.".format(self.url))
    result = json.loads(response.read())["items"]
    log.debug("Result: {0} fetched".format(listed(len(result), "item")))
    log.data(pretty(result))
    return result
def search(self, query):
    """ Perform GitHub query """
    url = self.url + "/" + query
    log.debug("GitHub query: {0}".format(url))
    try:
        request = urllib2.Request(url, headers=self.headers)
        response = urllib2.urlopen(request)
        log.debug("Response headers:\n{0}".format(
            unicode(response.info()).strip()))
    except urllib2.URLError as error:
        log.debug(error)
        raise ReportError("GitHub search on {0} failed.".format(self.url))
    result = json.loads(response.read())["items"]
    log.debug("Result: {0} fetched".format(listed(len(result), "item")))
    log.data(pretty(result))
    return result
def search(self, query):
    """ Perform Bodhi query """
    result = []
    current_page = 1
    original_query = query
    client = BodhiClient(self.url)
    while current_page:
        log.debug("Bodhi query: {0}".format(query))
        data = client.send_request(query, verb='GET')
        objects = data['updates']
        log.debug("Result: {0} fetched".format(listed(len(objects), "item")))
        log.data(pretty(data))
        result.extend(objects)
        if current_page < data['pages']:
            current_page = current_page + 1
            query = f"{original_query}&page={current_page}"
        else:
            current_page = None
    return result
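# Standalone sketch of the page-counter pagination above; the import path
# and the public Fedora instance url are assumptions, and the query string
# would be something like "updates/?user=somebody".
from bodhi.client.bindings import BodhiClient  # assumed import path

def bodhi_updates(query):
    """ Append '&page=N' until the reported page count is reached """
    client = BodhiClient("https://bodhi.fedoraproject.org")
    result = []
    page = 1
    while page:
        data = client.send_request(f"{query}&page={page}", verb="GET")
        result.extend(data["updates"])
        # 'pages' reports the total page count with every response
        page = page + 1 if page < data["pages"] else None
    return result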
def search(self, query, pagination, result_field):
    """ Perform Pagure query """
    result = []
    url = "/".join((self.url, query))
    while url:
        log.debug("Pagure query: {0}".format(url))
        try:
            response = requests.get(url, headers=self.headers)
            log.data("Response headers:\n{0}".format(response.headers))
        except requests.RequestException as error:
            log.error(error)
            raise ReportError("Pagure search {0} failed.".format(self.url))
        data = response.json()
        objects = data[result_field]
        log.debug("Result: {0} fetched".format(listed(len(objects), "item")))
        log.data(pretty(data))
        # FIXME later: Work around https://pagure.io/pagure/issue/4057
        if not objects:
            break
        result.extend(objects)
        url = data[pagination]['next']
    return result
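# Minimal standalone sketch of the body-embedded pagination above, assuming
# the public pagure.io API; the endpoint and field names are illustrative.
import requests

def pagure_fetch(url, result_field, pagination="pagination"):
    """ Follow the 'next' url embedded in each json response """
    result = []
    while url:
        data = requests.get(url).json()
        objects = data[result_field]
        # Empty page guard, same workaround as above
        if not objects:
            break
        result.extend(objects)
        url = data[pagination]["next"]
    return result

# Usage: pagure_fetch("https://pagure.io/api/0/pagure/issues", "issues")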
def _fetch_activities(self):
    """ Get organization activity, handle pagination """
    activities = []
    # Prepare url of the first page
    url = '{0}/organizations/{1}/activity/'.format(
        self.url, self.organization)
    while url:
        # Fetch one page of activities
        try:
            log.debug('Fetching activity data: {0}'.format(url))
            response = requests.get(url, headers=self.headers)
            if not response.ok:
                log.error(response.text)
                raise ReportError('Failed to fetch Sentry activities.')
            data = response.json()
            log.data("Response headers:\n{0}".format(
                pretty(response.headers)))
            log.debug("Fetched {0}.".format(listed(len(data), 'activity')))
            log.data(pretty(data))
            for activity in [Activity(item) for item in data]:
                # We've reached the last page, older records not relevant
                if activity.created < self.stats.options.since.date:
                    return activities
                # Store only relevant activities (before until date)
                if activity.created < self.stats.options.until.date:
                    log.details("Activity: {0}".format(activity))
                    activities.append(activity)
        except requests.RequestException as error:
            log.debug(error)
            raise ReportError(
                'Failed to fetch Sentry activities from {0}'.format(url))
        # Check for possible next page (missing Link header or no
        # regex match means we are done)
        try:
            url = NEXT_PAGE.search(response.headers['Link']).groups()[0]
        except (AttributeError, KeyError):
            url = None
    return activities
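# NEXT_PAGE is referenced above but not defined in this snippet; a plausible
# definition (an assumption, not the verified original) extracts the
# rel="next" url from the 'Link' response header:
import re

NEXT_PAGE = re.compile(r'<([^>]+)>;\s*rel="next"')

header = (
    '<https://sentry.io/api/0/organizations/acme/activity/?cursor=0:0:1>; '
    'rel="previous", '
    '<https://sentry.io/api/0/organizations/acme/activity/?cursor=0:100:0>; '
    'rel="next"')
match = NEXT_PAGE.search(header)
print(match.groups()[0] if match else None)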
def _get_gitlab_api_json(self, endpoint):
    """ Query the GitLab API endpoint and return parsed json output """
    log.debug("Query: {0}".format(endpoint))
    result = self._get_gitlab_api(endpoint).json()
    log.data(pretty(result))
    return result