def search(query, parent, options):
    """ Perform Trac search """
    # Extend the default max number of tickets to be fetched
    query = "{0}&max={1}".format(query, MAX_TICKETS)
    log.debug("Search query: {0}".format(query))
    try:
        found = parent.proxy.ticket.query(query)
    except xmlrpclib.Fault as error:
        log.error("An error encountered, while searching for tickets.")
        raise ReportError(error)
    except xmlrpclib.ProtocolError as error:
        log.debug(error)
        log.error("Trac url: {0}".format(parent.url))
        raise ReportError(
            "Unable to contact Trac server. Is the url above correct?")
    log.debug("Search result: {0}".format(found))
    # Fetch tickets and their history using multicall
    multicall = xmlrpclib.MultiCall(parent.proxy)
    for ticket_id in sorted(found):
        multicall.ticket.get(ticket_id)
        multicall.ticket.changeLog(ticket_id)
    log.debug(u"Fetching trac tickets and their history")
    responses = list(multicall())
    # Responses alternate between ticket data and its changelog
    tickets = responses[0::2]
    changelogs = responses[1::2]
    # Print debugging info
    for ticket, changelog in zip(tickets, changelogs):
        log.debug("Fetched ticket #{0}".format(ticket[0]))
        log.debug(pretty(ticket))
        log.debug("Changelog:")
        log.debug(pretty(changelog))
    # Return the list of ticket objects
    objects = []
    for ticket, changelog in zip(tickets, changelogs):
        objects.append(
            Trac(ticket, changelog, parent=parent, options=options))
    return objects
def search(self, query, options):
    """ Perform Bugzilla search """
    query["query_format"] = "advanced"
    log.debug("Search query:")
    log.debug(pretty(query))
    # Fetch bug info
    try:
        found = self.server.query(query)
    except xmlrpclib.Fault as error:
        # Ignore non-existent users (this is necessary for users with
        # several email aliases to allow them using --merge/--total)
        if "not a valid username" in unicode(error):
            log.debug(error)
            return []
        # Otherwise suggest to bake bugzilla cookies
        log.error("An error encountered, while searching for bugs.")
        log.debug(error)
        raise ReportError("Have you prepared your cookies by 'bugzilla login'?")
    log.debug("Search result:")
    log.debug(pretty(found))
    bugs = {bug.id: bug for bug in found}
    # Fetch bug history
    log.debug("Fetching bug history")
    response = self.server._proxy.Bug.history({"ids": bugs.keys()})
    log.debug(pretty(response))
    history = {bug["id"]: bug["history"] for bug in response["bugs"]}
    # Fetch bug comments
    log.debug("Fetching bug comments")
    response = self.server._proxy.Bug.comments({"ids": bugs.keys()})
    log.debug(pretty(response))
    comments = {
        int(key): data["comments"]
        for key, data in response["bugs"].items()}
    # Create bug objects
    return [
        self.parent.bug(bugs[bug_id], history[bug_id], comments[bug_id],
                        parent=self.parent)
        for bug_id in bugs]
def session(self):
    """ Initialize the session (created lazily, then cached) """
    if self._session is not None:
        return self._session
    self._session = requests.Session()
    log.debug("Connecting to {0}".format(self.auth_url))
    # Disable SSL warning when ssl_verify is False
    if not self.ssl_verify:
        requests.packages.urllib3.disable_warnings(
            InsecureRequestWarning)
    # Pick the authentication mechanism
    if self.auth_type == "basic":
        auth = (self.auth_username, self.auth_password)
    else:
        auth = HTTPSPNEGOAuth(mutual_authentication=DISABLED)
    response = self._session.get(
        self.auth_url, auth=auth, verify=self.ssl_verify)
    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError as error:
        log.error(error)
        raise ReportError(
            "Confluence authentication failed. Try kinit.")
    return self._session
def session(self):
    """ Initialize the session (created lazily, then cached) """
    if self._session is not None:
        return self._session
    self._session = requests.Session()
    log.debug("Connecting to {0}".format(self.auth_url))
    if self.auth_type == 'basic':
        # NOTE(review): the headers advertise JSON but `data=` sends the
        # credentials form-encoded with a GET request — presumably the
        # server accepts this; confirm before changing.
        data = {
            "username": self.auth_username,
            "password": self.auth_password
            }
        headers = {
            "Content-type": "application/json",
            "Accept": "application/json"
            }
        response = self._session.get(
            self.auth_url, headers=headers, data=data)
    else:
        gssapi_auth = HTTPSPNEGOAuth(mutual_authentication=DISABLED)
        response = self._session.get(self.auth_url, auth=gssapi_auth)
    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError as error:
        log.error(error)
        raise ReportError('Jira authentication failed. Try kinit.')
    return self._session
def __init__(self, option, name=None, parent=None, user=None):
    """ Create one GitCommits stats object per configured repository """
    name = "Work on {0}".format(option)
    StatsGroup.__init__(self, option, name, parent, user)
    for repo, path in did.base.Config().section(option):
        # Plain path: a single repository
        if not path.endswith('/*'):
            self.stats.append(
                GitCommits(option=option + "-" + repo, parent=self,
                           path=path, name="Work on {0}".format(repo)))
            continue
        # Wildcard path: expand to all git subdirectories
        try:
            directories = os.listdir(path[:-1])
        except OSError as error:
            log.error(error)
            raise did.base.ConfigError(
                "Invalid path in the [{0}] section".format(option))
        for repo_dir in sorted(directories):
            repo_path = path.replace('*', repo_dir)
            # Check directories only
            if not os.path.isdir(repo_path):
                continue
            # Silently ignore non-git directories
            if not os.path.exists(os.path.join(repo_path, ".git")):
                log.debug("Skipping non-git directory '{0}'.".format(
                    repo_path))
                continue
            self.stats.append(
                GitCommits(option="{0}-{1}".format(repo, repo_dir),
                           parent=self, path=repo_path,
                           name="Work on {0}/{1}".format(repo, repo_dir)))
def __init__(self, option, name=None, parent=None, user=None):
    """ Create one GitCommits stats object per configured repository """
    name = "Work on {0}".format(option)
    StatsGroup.__init__(self, option, name, parent, user)
    for repo, path in did.base.Config().section(option):
        if path.endswith('/*'):
            # Wildcard path: expand to all git subdirectories
            try:
                subdirs = os.listdir(path[:-1])
            except OSError as error:
                log.error(error)
                raise did.base.ConfigError(
                    "Invalid path in the [{0}] section".format(option))
            for subdir in sorted(subdirs):
                full_path = path.replace('*', subdir)
                # Check directories only
                if not os.path.isdir(full_path):
                    continue
                # Silently ignore non-git directories
                if not os.path.exists(os.path.join(full_path, ".git")):
                    log.debug("Skipping non-git directory '{0}'.".format(
                        full_path))
                    continue
                self.stats.append(GitCommits(
                    option="{0}-{1}".format(repo, subdir),
                    parent=self,
                    path=full_path,
                    name="Work on {0}/{1}".format(repo, subdir)))
        else:
            # Plain path: a single repository
            self.stats.append(GitCommits(
                option=option + "-" + repo,
                parent=self,
                path=path,
                name="Work on {0}".format(repo)))
def session(self):
    """
    Initialize the session.

    Authenticates against the Jira server using basic, token or gssapi
    authentication (selected by ``self.auth_type``) and caches the
    resulting session so later calls reuse it. When
    ``self.token_expiration`` is set, additionally looks up the personal
    access token and warns if it expires within that many days.

    Raises ReportError when authentication fails.
    """
    if self._session is None:
        self._session = requests.Session()
        log.debug("Connecting to {0}".format(self.auth_url))
        # Disable SSL warning when ssl_verify is False
        if not self.ssl_verify:
            requests.packages.urllib3.disable_warnings(
                InsecureRequestWarning)
        if self.auth_type == 'basic':
            basic_auth = (self.auth_username, self.auth_password)
            response = self._session.get(
                self.auth_url, auth=basic_auth, verify=self.ssl_verify)
        elif self.auth_type == "token":
            # Fix: set the header on self._session directly. The original
            # used `self.session.headers`, re-entering this property while
            # initialization was still in progress.
            self._session.headers["Authorization"] = f"Bearer {self.token}"
            response = self._session.get(
                "{0}/rest/api/2/myself".format(self.url),
                verify=self.ssl_verify)
        else:
            gssapi_auth = HTTPSPNEGOAuth(mutual_authentication=DISABLED)
            response = self._session.get(
                self.auth_url, auth=gssapi_auth, verify=self.ssl_verify)
        try:
            response.raise_for_status()
        except requests.exceptions.HTTPError as error:
            log.error(error)
            raise ReportError(
                "Jira authentication failed. Check credentials or kinit.")
        if self.token_expiration:
            # Look up the configured token and warn if it expires soon
            response = self._session.get(
                "{0}/rest/pat/latest/tokens".format(self.url),
                verify=self.ssl_verify)
            try:
                response.raise_for_status()
                token_found = None
                for token in response.json():
                    if token["name"] == self.token_name:
                        token_found = token
                        break
                if token_found is None:
                    raise ValueError(
                        f"Can't check validity for the '{self.token_name}' "
                        f"token as it doesn't exist.")
                from datetime import datetime
                expiring_at = datetime.strptime(
                    token_found["expiringAt"], r"%Y-%m-%dT%H:%M:%S.%f%z")
                delta = (
                    expiring_at.astimezone() - datetime.now().astimezone())
                if delta.days < self.token_expiration:
                    log.warn(
                        f"Jira token '{self.token_name}' "
                        f"expires in {delta.days} days.")
            except (requests.exceptions.HTTPError,
                    KeyError, ValueError) as error:
                # Expiration check is best-effort only — never fail auth
                log.warn(error)
    return self._session
def get_data(self):
    """ Get organization activity in JSON representation """
    endpoint = self.url + "organizations/" + self.organization + "/activity/"
    headers = {'Authorization': 'Bearer {0}'.format(self.token)}
    log.debug("Getting activity data from server.")
    try:
        response = urllib2.urlopen(urllib2.Request(endpoint, None, headers))
    except urllib2.URLError as e:
        log.error("An error encountered while getting data from server.")
        log.debug(e)
        raise ReportError("Could not get data. {0}.".format(str(e)))
    # Decode the JSON payload straight from the response stream
    return json.load(response)
def check(self):
    """ Check the stats if enabled. """
    if self.enabled():
        try:
            self.fetch()
        except (xmlrpc.client.Fault, did.base.ConfigError) as error:
            log.error(error)
            self._error = True
            # Raise the exception if debugging
            if not self.options or self.options.debug:
                raise
        # Show the results stats (unless merging)
        if self.options and not self.options.merge:
            self.show()
def check(self):
    """ Check the stats if enabled. """
    if not self.enabled():
        return
    try:
        self.fetch()
    except (xmlrpclib.Fault, did.base.ConfigError) as error:
        log.error(error)
        self._error = True
        # Raise the exception if debugging
        debugging = not self.options or self.options.debug
        if debugging:
            raise
    # Show the results stats (unless merging)
    showing = self.options and not self.options.merge
    if showing:
        self.show()
def session(self):
    """ Initialize the session (created lazily, then cached) """
    if self._session is not None:
        return self._session
    self._session = requests.Session()
    log.debug("Connecting to {0}".format(self.auth_url))
    # Pick the authentication mechanism
    if self.auth_type == 'basic':
        auth = (self.auth_username, self.auth_password)
    else:
        auth = HTTPSPNEGOAuth(mutual_authentication=DISABLED)
    response = self._session.get(self.auth_url, auth=auth)
    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError as error:
        log.error(error)
        raise ReportError('Jira authentication failed. Try kinit.')
    return self._session
def commits(self, user, options):
    """ List commits for given user. """
    # Prepare the command
    command = "git log --all --author={0}".format(user.login).split()
    command += [
        "--format=format:%h - %s",
        "--since='{0} 00:00:00'".format(options.since),
        "--until='{0} 00:00:00'".format(options.until),
        ]
    if options.verbose:
        command.append("--name-only")
    log.info(u"Checking commits in {0}".format(self.path))
    log.debug(pretty(command))
    # Get the commit messages
    try:
        process = subprocess.Popen(
            command, cwd=self.path,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except OSError as error:
        log.debug(error)
        raise ReportError(
            "Unable to access git repo '{0}'".format(self.path))
    output, errors = process.communicate()
    log.debug("git log output:")
    log.debug(output)
    # Bail out when git itself failed
    if process.returncode != 0:
        log.error(errors.strip())
        raise ReportError(
            "Unable to check commits in '{0}'".format(self.path))
    if not output:
        return []
    decoded = unicode(output, "utf8")
    if not options.verbose:
        return decoded.split("\n")
    # Verbose mode: append the touched directory to each summary line
    commits = []
    for commit in decoded.split("\n\n"):
        summary = commit.split("\n")[0]
        directory = re.sub("/[^/]+$", "", commit.split("\n")[1])
        commits.append("{0}\n{1}* {2}".format(
            summary, 8 * " ", directory))
    return commits
def search(self, query, options):
    """ Perform Bugzilla search """
    query["query_format"] = "advanced"
    query["limit"] = "0"
    log.debug("Search query:")
    log.debug(pretty(query))
    # Fetch bug info
    try:
        found = self.server.query(query)
    except xmlrpc.client.Fault as error:
        # Ignore non-existent users (this is necessary for users with
        # several email aliases to allow them using --merge/--total)
        if "not a valid username" in str(error):
            log.debug(error)
            return []
        # Otherwise suggest to bake bugzilla cookies
        log.error("An error encountered, while searching for bugs.")
        log.debug(error)
        raise ReportError(
            "Have you baked cookies using the 'bugzilla login' command?")
    log.debug("Search result:")
    log.debug(pretty(found))
    bugs = {bug.id: bug for bug in found}
    # Fetch bug history
    log.debug("Fetching bug history")
    response = self.server._proxy.Bug.history({'ids': list(bugs.keys())})
    log.debug(pretty(response))
    history = {bug["id"]: bug["history"] for bug in response["bugs"]}
    # Fetch bug comments
    log.debug("Fetching bug comments")
    response = self.server._proxy.Bug.comments({'ids': list(bugs.keys())})
    log.debug(pretty(response))
    comments = {
        int(key): data["comments"]
        for key, data in response["bugs"].items()}
    # Create bug objects
    return [
        self.parent.bug(bugs[bug_id], history[bug_id], comments[bug_id],
                        parent=self.parent)
        for bug_id in bugs]
def __init__(self, option, name=None, parent=None):
    """ Create one GitCommits stats object per configured repository """
    name = "Work on {0}".format(option)
    StatsGroup.__init__(self, option, name, parent)
    for repo, path in Config().section(option):
        # Plain path: a single repository
        if not path.endswith('/*'):
            self.stats.append(GitCommits(
                option=repo, parent=self, path=path,
                name="Work on {0}".format(repo)))
            continue
        # Wildcard path: expand to all subdirectories
        try:
            directories = os.listdir(path[:-1])
        except OSError as error:
            log.error("Wrong path in the [{0}] config section".format(
                option))
            log.error(error)
            raise SystemExit(1)
        for repo_dir in sorted(directories):
            self.stats.append(GitCommits(
                option="{0}-{1}".format(repo, repo_dir),
                parent=self,
                path=path.replace('*', repo_dir),
                name="Work on {0}/{1}".format(repo, repo_dir)))
def search(self, query, pagination, result_field):
    """ Perform Pagure query, following pagination links """
    fetched = []
    url = "/".join((self.url, query))
    while url:
        log.debug("Pagure query: {0}".format(url))
        try:
            response = requests.get(url, headers=self.headers)
            log.data("Response headers:\n{0}".format(response.headers))
        except requests.RequestException as error:
            log.error(error)
            raise ReportError("Pagure search {0} failed.".format(self.url))
        data = response.json()
        page_items = data[result_field]
        log.debug("Result: {0} fetched".format(
            listed(len(page_items), "item")))
        log.data(pretty(data))
        # FIXME later: Work around https://pagure.io/pagure/issue/4057
        if not page_items:
            break
        fetched.extend(page_items)
        url = data[pagination]['next']
    return fetched
def search(self, query, pagination, result_field):
    """ Perform Pagure query, following pagination links """

    def fetch_page(page_url):
        # Fetch and decode a single page of results
        try:
            response = requests.get(page_url, headers=self.headers)
            log.data("Response headers:\n{0}".format(response.headers))
        except requests.RequestException as error:
            log.error(error)
            raise ReportError("Pagure search {0} failed.".format(self.url))
        return response.json()

    result = []
    url = "/".join((self.url, query))
    while url:
        log.debug("Pagure query: {0}".format(url))
        data = fetch_page(url)
        objects = data[result_field]
        log.debug("Result: {0} fetched".format(
            listed(len(objects), "item")))
        log.data(pretty(data))
        # FIXME later: Work around https://pagure.io/pagure/issue/4057
        if not objects:
            break
        result.extend(objects)
        url = data[pagination]['next']
    return result
def session(self):
    """ Initialize the session (created lazily, then cached) """
    if self._session is not None:
        return self._session
    self._session = requests.Session()
    log.debug("Connecting to {0}".format(self.auth_url))
    # Disable SSL warning when ssl_verify is False
    if not self.ssl_verify:
        requests.packages.urllib3.disable_warnings(
            InsecureRequestWarning)
    # Pick the authentication mechanism
    if self.auth_type == 'basic':
        auth = (self.auth_username, self.auth_password)
    else:
        auth = HTTPSPNEGOAuth(mutual_authentication=DISABLED)
    response = self._session.get(
        self.auth_url, auth=auth, verify=self.ssl_verify)
    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError as error:
        log.error(error)
        raise ReportError('Jira authentication failed. Try kinit.')
    return self._session
def _fetch_activities(self):
    """ Get organization activity, handle pagination """
    collected = []
    # Prepare url of the first page
    page_url = '{0}/organizations/{1}/activity/'.format(
        self.url, self.organization)
    while page_url:
        # Fetch one page of activities
        try:
            log.debug('Fetching activity data: {0}'.format(page_url))
            response = requests.get(page_url, headers=self.headers)
            if not response.ok:
                log.error(response.text)
                raise ReportError('Failed to fetch Sentry activities.')
            data = response.json()
            log.data("Response headers:\n{0}".format(
                pretty(response.headers)))
            log.debug("Fetched {0}.".format(listed(len(data), 'activity')))
            log.data(pretty(data))
            for activity in [Activity(item) for item in data]:
                # We've reached the last page, older records not relevant
                if activity.created < self.stats.options.since.date:
                    return collected
                # Store only relevant activites (before until date)
                if activity.created < self.stats.options.until.date:
                    log.details("Activity: {0}".format(activity))
                    collected.append(activity)
        except requests.RequestException as error:
            log.debug(error)
            raise ReportError(
                'Failed to fetch Sentry activities from {0}'.format(
                    page_url))
        # Check for possible next page
        try:
            page_url = NEXT_PAGE.search(
                response.headers['Link']).groups()[0]
        except AttributeError:
            page_url = None
    return collected
def search(query, parent, options):
    """ Perform Trac search """
    # Extend the default max number of tickets to be fetched
    query = "{0}&max={1}".format(query, MAX_TICKETS)
    log.debug("Search query: {0}".format(query))
    try:
        matched = parent.proxy.ticket.query(query)
    except xmlrpc.client.Fault as error:
        log.error("An error encountered, while searching for tickets.")
        raise ReportError(error)
    except xmlrpc.client.ProtocolError as error:
        log.debug(error)
        log.error("Trac url: {0}".format(parent.url))
        raise ReportError(
            "Unable to contact Trac server. Is the url above correct?")
    log.debug("Search result: {0}".format(matched))
    # Fetch tickets and their history using multicall
    multicall = xmlrpc.client.MultiCall(parent.proxy)
    for ticket_id in sorted(matched):
        multicall.ticket.get(ticket_id)
        multicall.ticket.changeLog(ticket_id)
    log.debug("Fetching trac tickets and their history")
    responses = list(multicall())
    # Responses alternate between ticket data and its changelog
    pairs = list(zip(responses[0::2], responses[1::2]))
    # Print debugging info
    for ticket, changelog in pairs:
        log.debug("Fetched ticket #{0}".format(ticket[0]))
        log.debug(pretty(ticket))
        log.debug("Changelog:")
        log.debug(pretty(changelog))
    # Return the list of ticket objects
    return [
        Trac(ticket, changelog, parent=parent, options=options)
        for ticket, changelog in pairs]
def main(arguments=None):
    """ Parse options, gather stats and show the results

    Takes optional parameter ``arguments`` which can be either
    command line string or list of options. This is very useful
    for testing purposes. Function returns a tuple of the form::

        ([user_stats], team_stats)

    with the list of all gathered stats objects.
    """
    try:
        # Parse options, initialize gathered stats
        options = Options().parse(arguments)
        # Check for user email addresses (command line or config)
        users = [did.base.User(email=email) for email in options.emails]
        # Prepare team stats object for data merging
        team_stats = UserStats(options=options)
        if options.merge:
            utils.header("Total Report")
            utils.item("Users: {0}".format(len(users)), options=options)
        # Check individual user stats
        gathered_stats = []
        for user in users:
            if options.merge:
                utils.item(user, 1, options=options)
            else:
                utils.header(user)
            stats = UserStats(user=user, options=options)
            stats.check()
            team_stats.merge(stats)
            gathered_stats.append(stats)
        # Display merged team report
        if options.merge or options.total:
            if options.total:
                utils.header("Total Report")
            team_stats.show()
        # Return all gathered stats objects
        return gathered_stats, team_stats
    except ConfigError as error:
        utils.info("Create at least a minimum config file {0}:\n{1}".format(
            did.base.Config.path(), did.base.Config().example().strip()))
        log.error(error)
        sys.exit(1)
    except (OptionError, ReportError) as error:
        log.error(error)
        sys.exit(1)
    except kerberos.GSSError as error:
        log.debug(error)
        log.error("Kerberos authentication failed. Try kinit.")
        sys.exit(2)
    except Exception as error:
        # Unexpected failure: re-raise for a full traceback when debugging
        if "--debug" in sys.argv:
            raise
        log.error(error)
        sys.exit(3)