def main(repo, project):
    """Migrate all JIRA issues of *project* into the repository *repo*.

    Searches JIRA one year at a time (JIRA_YEAR_START..JIRA_YEAR_END),
    sorts results by issue number, then creates one target issue per
    JIRA issue, carrying over comments and attachments.
    """
    jira = JIRA(JIRA_URL, basic_auth=[JIRA_USERNAME, JIRA_PASSWORD])
    print("Connection to JIRA successfully established.")

    print("Fetching list of matching issues...")
    # Get issue list for all the issues that match given project.
    # One search per year keeps each result set under the maxResults cap.
    issue_list = []
    for year in range(JIRA_YEAR_START, JIRA_YEAR_END + 1):
        jira_filter = JIRA_FILTER_TEMP.format(project=project, start=year, end=year + 1)
        issue_list += jira.search_issues(jira_filter, maxResults=5000)

    # Sort by the numeric suffix of the key ("PRJ-123" -> 123) so issues
    # are processed in creation order.
    sorted_issue_list = sorted(issue_list, key=lambda i: int(i.key.split('-')[1]))

    print("Fetching milestones...")
    milestone_map = generate_milestone_map(repo, sorted_issue_list)

    print(f"The script will process {len(sorted_issue_list)} matching issues now.")
    # BUG FIX: the original fetched jira.issue(sorted_issue_list[0].key)
    # once before the loop and immediately discarded it -- a redundant API
    # call (and a crash on an empty result list); removed.
    for issue_key in [i.key for i in sorted_issue_list]:
        # Re-fetch each issue individually to obtain the full field set.
        issue = jira.issue(issue_key)
        data, comments = generate_issue_data(issue, milestone_map)
        comments.insert(0, generate_meta_comment(issue))
        download_attachments(issue)
        create_issue(repo, data, comments)
        time.sleep(REQUEST_SLEEP)  # throttle to stay under rate limits
class JiraHelper(object):
    """Thin convenience wrapper around a JIRA connection."""

    def __init__(self, host, user="", password=""):
        self.host = host
        self.user = user
        self.password = password
        try:
            # Authenticate only when both credentials were supplied;
            # otherwise fall back to an anonymous connection.
            if user != "" and password != "":
                self.jira = JIRA(host, basic_auth=(user, password))
            else:
                self.jira = JIRA(host)
        except JIRAError as e:
            printErrorMsg('Error connecting to %s. Check if username and password are correct.' % (self.host))
            printJiraErrorAndExit(e)

    def getSummary(self, issue):
        """ Gets the summary for the given ticket. """
        return self.jira.issue(issue).fields.summary

    def getIssues(self, issues):
        """ Gets the issues from Jira with the given issue numbers. """
        return [self.jira.issue(number) for number in issues]
def update_jira(username, password):
    """Update time logs in Jira

    Current implementation uses basic authentication, future version
    will have better auth. For simplicity the toggl log should have
    issue number as timelog description.

    *username* to use to connect to Jira
    *password* for Jira authentication
    """
    url = CONFIG.get('jira')['url']
    jira = JIRA(options={'server': url}, basic_auth=(username, password))
    toggl_conn = toggl.Connect('jira')
    for log_entry in toggl_conn.get_time_logs():
        # The toggl description is expected to hold the JIRA issue key.
        issue_id = log_entry.get('description')
        try:
            issue = jira.issue(issue_id)
        except JIRAError:
            logging.warning('Failed to find issue-id {0}'.format(issue_id))
            continue
        # Compute time in hours and round to two decimal places
        time_in_hours = round(log_entry['duration'] / (60 * 60), 2)
        jira.add_worklog(issue, time_in_hours, comment='Updated using Jira API')
def comments(task):
    """Print every comment on a JIRA issue.

    Falls back to the issue named after the current git branch when
    *task* is falsy.
    """
    repo = Repo(os.getcwd())
    jr = JIRA({'server': JIRA_URL}, basic_auth=(JIRA_USER, JIRA_PASS))
    issue_key = task or repo.head.ref.name
    issue = jr.issue(issue_key)
    for note in issue.fields.comment.comments:
        click.echo('-----------------------------------------------------------')
        click.echo(click.style(note.author.displayName + ': \n', fg='green'))
        click.echo('\r' + note.body)
class JiraHamsterListener(HamsterListener):
    """Hamster listener that links time-tracking facts to JIRA issues."""

    # Section name used in ~/.hamster-bridge.cfg.
    short_name = 'jira'

    # (key, prompt) pairs used to interactively build the config section.
    config_values = [
        ('server_url', lambda: raw_input('Root url to your jira server [f.e. "http://jira.example.org"]\n')),
        ('username', lambda: raw_input('Your jira user name\n')),
        ('password', lambda: raw_input('Your jira password\n')),
        ('auto_start', lambda: raw_input('Automatically start the issue when you start the task in hamster? [y/n]\n'))
    ]

    # Matches JIRA issue keys such as "ABC-123" inside free-form text.
    issue_from_title = re.compile('([A-Z][A-Z0-9]+-[0-9]+)')

    # noinspection PyBroadException
    def prepare(self):
        # Connect to JIRA with the credentials from the config file.
        server_url = self.config.get(self.short_name, 'server_url')
        username = self.config.get(self.short_name, 'username')
        password = self.config.get(self.short_name, 'password')
        logger.info('Connecting as "%s" to "%s"', username, server_url)
        self.jira = JIRA(server_url, basic_auth=(username, password))
        try:
            # Cheap API call to verify the connection/credentials early.
            self.jira.projects()
        except:
            logger.exception('Can not connect to JIRA, please check ~/.hamster-bridge.cfg')

    def __issue_from_fact(self, fact):
        """
        Get the issue name from a fact
        :param fact: the fact to search the issue in
        """
        # Search the activity name first, then the tags; return the first
        # candidate key that actually exists on the server. Falls through
        # (implicitly returning None) when nothing matches.
        fields = [fact.activity] + fact.tags
        logger.debug('Searching ticket in: %r', fields)
        for field in fields:
            for possible_issue in self.issue_from_title.findall(field):
                logger.debug('Lookup issue for activity "%s"', possible_issue)
                try:
                    self.jira.issue(possible_issue)
                    logger.debug('Found existing issue "%s" in "%s"', possible_issue, field)
                    return possible_issue
                except JIRAError, e:
                    # A missing issue is expected (false-positive regex hit);
                    # anything else is a real communication failure.
                    if e.text == 'Issue Does Not Exist':
                        logger.warning('Tried issue "%s", but does not exist. ', possible_issue)
                    else:
                        logger.exception('Error communicating with Jira')
def delete_issue(request, find):
    """Delete the JIRA issue that is linked to the given finding."""
    linked = JIRA_Issue.objects.get(finding=find)
    conf = find.jira_conf()
    client = JIRA(server=conf.url,
                  basic_auth=(conf.username, conf.password))
    client.issue(linked.jira_id).delete()
def updateIssues(issuelist, NEXTorDOTX, description):
    # Move every issue in issuelist (XML nodes from a JIRA feed) to the next
    # fix version, optionally (NEXTorDOTX) also moving it to the next sprint.
    numExistingIssues = len(issuelist) if not issuelist == None else 0
    if numExistingIssues > 0 :
        if debug:
            print "[DEBUG] Move " + str(numExistingIssues) + " " + description
        jira = JIRA(options={'server':jiraserver}, basic_auth=(jirauser, jirapwd))
        cnt = 0
        for s in issuelist :
            # Each entry is an XML node; extract the JIRA key from it.
            key = components.getText(components.findChildNodeByName(s, 'key').childNodes)
            issue = jira.issue(key)
            cnt += 1
            doThisJIRA = True
            whichLabelSkipped = ""
            # Skip any issue that carries one of the configured skip labels.
            for label in issue.fields.labels:
                for skipLabel in skipLabels:
                    if label == skipLabel.strip():
                        whichLabelSkipped = label
                        doThisJIRA = False
            linkURL = components.getText(components.findChildNodeByName(s, 'link').childNodes)
            summary = components.getText(components.findChildNodeByName(s, 'summary').childNodes).strip()
            operation = " + [" + str(cnt) + "/" + str(len(issuelist)) + "] Update " + linkURL + " : " + summary
            if debug:
                operation = operation + " :: " + str(issue.fields.labels)
            if doThisJIRA == False:
                operation = " - [" + str(cnt) + "/" + str(len(issuelist)) + "] -Skip- " + linkURL + " (" + whichLabelSkipped + ") : " + summary
                print operation
            else:
                if options.autoApplyChanges or options.dryrun:
                    print operation
                    yesno = ""
                else:
                    # Interactive confirmation before touching the issue.
                    yesno = raw_input(operation + " ? \n[y/N] ")
                if options.autoApplyChanges or yesno.capitalize() in ["Y"]:
                    # move issue to next fixversion
                    if components.findChildNodeByName(s, 'project').attributes["key"].value == "JBIDE": # JBIDE or JBDS
                        fixversion = version_jbt
                        fixversion_NEXT = version_jbt_NEXT if NEXTorDOTX else version_jbt_DOTX
                    else:
                        fixversion = version_ds
                        fixversion_NEXT = version_ds_NEXT if NEXTorDOTX else version_ds_DOTX
                    fixVersions = []
                    # NOTE: if there is more than one fixversion, the others will not be changed
                    for version in issue.fields.fixVersions:
                        if version.name != fixversion:
                            fixVersions.append({'name': version.name})
                    fixVersions.append({'name': fixversion_NEXT})
                    issue.update(fields={'fixVersions': fixVersions})
                    # only for NEXT, not for .x
                    if NEXTorDOTX:
                        # move issue to new sprint
                        jira.add_issues_to_sprint(sprintId_NEXT, [key])
                        jira.add_comment(key, "[checkUnresolvedIssues.py] Slip to fixversion = *" + fixversion_NEXT + "* and sprint = *" + sprint_NEXT + "*")
                    else:
                        jira.add_comment(key, "[checkUnresolvedIssues.py] Slip to fixversion = *" + fixversion_NEXT + "*")
def get_jira_summary(server, username, password, ticketId): try: jira = JIRA(server=server, basic_auth=(username, password), options={'verify':False}) issue = jira.issue(ticketId, fields='summary') return issue.fields.summary except JIRAError, e: return "JIRA error: " + e.text
def get_jira_details(repo_history_list):
    """Write a CSV report with JIRA details for every issue key found in the
    repository history list."""
    output_file = open(repo_info['filename'] + '_' + since + '_' + until + '.csv', 'w')
    headings = ['Issue', 'Project', 'Summary', 'Status', 'Type', 'Components', 'Reporter', 'Created', 'Updated',
                'Resolved', 'Epic', 'Fix Versions', 'PM Notes Internal', 'PM Notes External']
    output_file.write(','.join(headings))
    output_file.write('\n')
    print('Getting JIRA details for issues...')
    # NOTE(review): credentials are hard-coded here -- consider moving them
    # into configuration/secrets storage.
    jira = JIRA(options, basic_auth=('noninteractive', 'etopup123'))
    for jira_from_repo in repo_history_list:
        if jira_from_repo == 'REVERT':
            # Sentinel entry in the history list, not a real issue key.
            continue
        try:
            # customfield_10008/10600/11200 presumably map to Epic link and
            # the two PM-notes fields (see headings) -- confirm against the
            # JIRA instance's field configuration.
            issue = jira.issue(jira_from_repo, fields='summary,status,issuetype,components,created,updated,'
                                                      'resolutiondate,reporter,fixVersions,customfield_10008,'
                                                      'customfield_10600,customfield_11200,project')
        except Exception as e:
            print('Problem obtaining info about issue=' + jira_from_repo, str(e))
            output_file.write(jira_from_repo + ',Unknown,Unknown JIRA!,,,,,,,,,,,')
            output_file.write('\n')
            continue
        # Commas/newlines are replaced because the report is plain
        # comma-separated text with no quoting.
        summary = issue.fields.summary.replace(',', ' ')
        status = issue.fields.status.name
        issuetype = issue.fields.issuetype.name
        components = []
        for component in issue.fields.components:
            components.append(component.name)
        all_components = ';'.join(components)
        created = parse(issue.fields.created).date().strftime("%Y-%m-%d")
        updated = parse(issue.fields.updated).date().strftime("%Y-%m-%d")
        resolved = ''
        if issue.fields.resolutiondate:
            resolved = parse(issue.fields.resolutiondate).date().strftime("%Y-%m-%d")
        reporter = issue.fields.reporter.displayName
        versions = []
        for version in issue.fields.fixVersions:
            versions.append(version.name)
        all_versions = ';'.join(versions)
        epic = ''
        if issue.fields.customfield_10008:
            epic = issue.fields.customfield_10008
        pm_internal = ''
        if issue.fields.customfield_10600:
            pm_internal = issue.fields.customfield_10600.replace(',', ' ')
            pm_internal = pm_internal.replace('\r\n', '|')
        pm_external = ''
        if issue.fields.customfield_11200:
            pm_external = issue.fields.customfield_11200.replace(',', ' ')
            pm_external = pm_external.replace('\r\n', '|')
        project_name = issue.fields.project.name
        try:
            issue_items = [jira_from_repo, project_name, summary, status, issuetype, all_components, reporter,
                           created, updated, resolved, epic, all_versions, pm_internal, pm_external]
            output_file.write(','.join(issue_items))
            output_file.write('\n')
        except Exception as e:
            # join() raises when a field is not a string (e.g. a custom
            # field object); the row is skipped with a diagnostic.
            print('JIRA field problem for issue=' + jira_from_repo, str(e))
def update_epic(eng, push_to_jira):
    """Sync the JIRA epic linked to an engagement with its current name."""
    engagement = eng
    prod = Product.objects.get(engagement=engagement)
    jpkey = JIRA_PKey.objects.get(product=prod)
    jira_conf = jpkey.conf
    if jpkey.enable_engagement_epic_mapping and push_to_jira:
        client = JIRA(server=jira_conf.url,
                      basic_auth=(jira_conf.username, jira_conf.password))
        linked = JIRA_Issue.objects.get(engagement=eng)
        epic = client.issue(linked.jira_id)
        epic.update(summary=eng.name, description=eng.name)
def update_jira_story(story_id, transition_id, comment=None, **kwargs):
    """Transition a JIRA story and optionally add a comment.

    Validates that *transition_id* is allowed from the issue's current
    status before attempting the transition. Errors are logged, never
    raised, so callers are insulated from JIRA failures.
    """
    try:
        jira = JIRA("https://thezebra.atlassian.net",
                    basic_auth=(os.environ['username'], os.environ['password']))
        issue = jira.issue(story_id)
        allowed_transitions = {t['id'] for t in jira.transitions(issue)}
        if str(transition_id) not in allowed_transitions:
            # FIX: logger.warn is a deprecated alias -- use warning().
            app.logger.warning("Story %s cannot transition to %s" % (story_id, transition_id))
        else:
            jira.transition_issue(issue, transition_id)
            if comment:
                jira.add_comment(issue, comment)
    except Exception as ex:
        app.logger.error(ex)
def build_message(self, link):
    # Build the chat message for *link*: the quip plus, best-effort, the
    # summary of the related JIRA issue.
    msg = self.quip(link)
    try:
        jira = JIRA(self._conf['JIRA_HOST'], basic_auth=(self._conf['JIRA_LOGIN'], self._conf['JIRA_PASSWORD']))
        # NOTE(review): `ticket` is not defined in this method or any
        # visible scope -- this raises NameError, which the broad except
        # below swallows, so the summary is never appended. Presumably the
        # issue key should be parsed out of `link`; confirm and fix.
        issue = jira.issue(ticket)
        msg += '>>> %s' % self._escape(issue.fields.summary)
    except Exception as ex:
        if self._log:
            self._log.error('JIRA: %s' % (ex))
        pass
    return msg
def add_jira_comment(jira_id, comment):
    """Post *comment* on a JIRA issue.

    Returns False (and does nothing) when the integration is disabled,
    True after the comment has been added.
    """
    if not settings.CASELINK_JIRA['ENABLE']:
        return False
    conf = settings.CASELINK_JIRA
    # verify=False: TLS certificate checking disabled for this server.
    client = JIRA({'server': conf['SERVER'], 'verify': False},
                  basic_auth=(conf['USER'], conf['PASSWORD']))
    client.add_comment(client.issue(id=jira_id), comment)
    return True
def jira_command(args):
    """Slack handler: expand any JIRA issue keys mentioned in a message."""
    channel = args['channel_name']
    if channel == 'directmessage':
        # Channel name is "directmessage" for all direct messages, so we have
        # to use the channel ID to keep them separate.
        channel = 'dm:%s' % args['channel_id']

    issue_keys = jira_key_re.findall(args['text'])
    if not issue_keys:
        return Response()

    log.info('Message from %s in #%s contained JIRA issue key(s): %s',
             args['user_name'], channel, ', '.join(issue_keys))

    # Login to JIRA
    authinfo = (
        current_app.config['JIRA_USERNAME'],
        current_app.config['JIRA_PASSWORD'],
    )
    jira_url = current_app.config['JIRA_URL']
    jira = JIRA(jira_url, basic_auth=authinfo, options={'check_update': False})

    # Retrieve issue(s), skipping keys mentioned within the blackout period.
    attachments = []
    for issue_key in issue_keys:
        try:
            last_mention = get_last_mention(channel, issue_key)
            if last_mention:
                log.debug('%s last mentioned in #%s at %s',
                          issue_key, channel, last_mention)
                blackout = current_app.config['JIRA_ID_BLACKOUT_PERIOD']
                if datetime.now() <= last_mention + blackout:
                    continue
            attachments.append(format_attachment(jira.issue(issue_key)))
        except JIRAError as e:
            if e.status_code == 404:
                log.warning('%s does not exist', issue_key)
            else:
                log.error('Error looking up %s: %s', issue_key, e.text)

    if not attachments:
        return Response()
    return jsonify({
        'response_type': 'in_channel',
        'attachments': attachments,
    })
class JiraWrapper(object):
    """Small convenience facade over a basic-auth JIRA client."""

    def __init__(self, server, user, password, timeout=5.0):
        self.server = server
        self.auth = (user, password)
        self.timeout = timeout
        # The underlying client receives the server via its options mapping.
        self.jira = JIRA(basic_auth=(user, password), options={'server': server})

    def get_ticket(self, issue_id):
        """Return the issue object, or None when the lookup fails."""
        try:
            return self.jira.issue(issue_id)
        except Exception as e:
            print(e)
            return None
def retrun_jira_24hrs():
    # Return a formatted list of MB-project issues created in the last day.
    # NOTE(review): "retrun" is a typo, but renaming would break callers.
    jira = JIRA({"server": "https://issues.couchbase.com"})  # anonymous connection
    issues = jira.search_issues("created>-1d AND project=MB")
    return_jira_list = []
    for tix in issues:
        #print tix
        bug = jira.issue(tix.key) #get the actual jira ticket with details
        #print bug.fields.summary
        #print bug.fields.components
        # Debug output left in place; prints the component field types.
        print type(bug.fields.components)
        print type(bug.fields.components[0])
        #print "Bug is is {0} - Component - {1} - Summary - {2}".format(tix,bug.fields.components, bug.fields.summary)
        return_jira_list.append("Bug No - {0} - Component - {1} - Summary - {2}".format(tix,bug.fields.components, bug.fields.summary))
        #return return_jira_list
    return return_jira_list
def _refresh_jira(url, config, issues):
    """Refresh the open/closed flag of tracked issues from a JIRA server."""
    config = confetti.Config(json.loads(config))
    jira = JIRA(url, basic_auth=(config.root.username, config.root.password))
    # Issues remain "open" for a grace period (days) after being resolved.
    grace = timedelta(days=config.get('resolution_grace', 0))
    for tracked in issues:
        remote = jira.issue(tracked.id_in_tracker)
        resolved_at = remote.fields.resolutiondate
        if resolved_at is None:
            tracked.open = True
        else:
            parsed = flux.current_timeline.datetime.strptime(
                resolved_at, '%Y-%m-%dT%H:%M:%S.%f%z')
            now = flux.current_timeline.datetime.now().replace(tzinfo=timezone.utc)
            tracked.open = (now - parsed) < grace
    db.session.commit()
class Jira(plugin.Plugin):
    """Provide access to the Jira API"""

    def __init__(self, irc):
        self.irc = irc
        self.name = self.__class__.__name__
        config = utils.get_config("Jira")
        self.jira_domain = config.get("domain")
        self.jira_username = config.get("username")
        self.jira_password = config.get("password")
        # BUG FIX: self.jira_url was referenced but never assigned, raising
        # AttributeError. The browse-URL below prefixes "https://", so
        # jira_url holds the bare domain -- TODO confirm the config's
        # "domain" value matches that expectation.
        self.jira_url = self.jira_domain
        self.jira = JIRA("https://%s" % self.jira_url,
                         basic_auth=(self.jira_username, self.jira_password))

    @plugin.hook_add_keyword("jira")
    @utils.require_params
    @utils.spawn
    def keyword_jira(self, message, params=None, **kwargs):
        """Retrieve Jira ticket information (ex: jira NCP-1444)"""
        params = utils.ensure_int(params)
        if not params:
            return
        self._find_issue(message, params)

    def _find_issue(self, message, issue_id):
        """Find and display a Jira issue"""
        try:
            issue = self.jira.issue(issue_id)
        except Exception:
            return
        # BUG FIX: jira issues expose "issuetype", not "tracker", and the
        # Issue resource has no dict-style .get(); read via the fields
        # object instead (assignee may legitimately be unset).
        assignee = getattr(issue.fields, "assignee", None) or "N/A"
        msg = "JIRA %s #%s: %s [S: %s, P: %s, L: %s, A: %s]" % (
            issue.fields.issuetype.name, issue.id, issue.fields.summary,
            issue.fields.status.name, issue.fields.priority.name,
            ",".join(issue.fields.labels), assignee)
        url = "https://%s/browse/%s" % (self.jira_url, issue.key)
        message.dispatch("%s %s" % (msg, url))
def add_issue(find, push_to_jira):
    """Create a JIRA issue for a DefectDojo finding.

    Only pushes when *push_to_jira* is set and the finding is both Active
    and Verified; otherwise an alert is logged instead.
    """
    eng = Engagement.objects.get(test=find.test)
    prod = Product.objects.get(engagement= eng)
    jpkey = JIRA_PKey.objects.get(product=prod)
    jira_conf = jpkey.conf
    if push_to_jira:
        if 'Active' in find.status() and 'Verified' in find.status():
            try:
                JIRAError.log_to_tempfile=False
                jira = JIRA(server=jira_conf.url, basic_auth=(jira_conf.username, jira_conf.password))
                if jpkey.component:
                    # A component is configured for this product: include it.
                    new_issue = jira.create_issue(project=jpkey.project_key, summary=find.title,
                                                  components=[{'name': jpkey.component}, ],
                                                  description=jira_long_description(find.long_desc(), find.id,
                                                                                    jira_conf.finding_text),
                                                  issuetype={'name': jira_conf.default_issue_type},
                                                  priority={'name': jira_conf.get_priority(find.severity)})
                else:
                    new_issue = jira.create_issue(project=jpkey.project_key, summary=find.title,
                                                  description=jira_long_description(find.long_desc(), find.id,
                                                                                    jira_conf.finding_text),
                                                  issuetype={'name': jira_conf.default_issue_type},
                                                  priority={'name': jira_conf.get_priority(find.severity)})
                # Persist the mapping between the finding and the new issue.
                j_issue = JIRA_Issue(jira_id=new_issue.id, jira_key=new_issue, finding=find)
                j_issue.save()
                issue = jira.issue(new_issue.id)
                #Add labels (security & product)
                add_labels(find, new_issue)
                #Upload dojo finding screenshots to Jira
                for pic in find.images.all():
                    jira_attachment(jira, issue, settings.MEDIA_ROOT + pic.image_large.name)
                #if jpkey.enable_engagement_epic_mapping:
                #    epic = JIRA_Issue.objects.get(engagement=eng)
                #    issue_list = [j_issue.jira_id,]
                #    jira.add_issues_to_epic(epic_id=epic.jira_id, issue_keys=[str(j_issue.jira_id)], ignore_epics=True)
            except JIRAError as e:
                log_jira_alert(e.text, find)
        else:
            log_jira_alert("Finding not active or verified.", find)
def update_issue(find, old_status, push_to_jira):
    """Push updates for a DefectDojo finding to its linked JIRA issue and
    transition the issue when the finding moved between open/closed states."""
    prod = Product.objects.get(engagement=Engagement.objects.get(test=find.test))
    jpkey = JIRA_PKey.objects.get(product=prod)
    jira_conf = jpkey.conf
    if push_to_jira:
        j_issue = JIRA_Issue.objects.get(finding=find)
        try:
            JIRAError.log_to_tempfile=False
            jira = JIRA(server=jira_conf.url, basic_auth=(jira_conf.username, jira_conf.password))
            issue = jira.issue(j_issue.jira_id)
            fields={}
            # Only update the component if it didn't exist earlier in Jira, this is to avoid assigning multiple components to an item
            if issue.fields.components:
                log_jira_alert("Component not updated, exists in Jira already. Update from Jira instead.", find)
            else:
                #Add component to the Jira issue
                component = [{'name': jpkey.component},]
                fields={"components": component}
            #Upload dojo finding screenshots to Jira
            for pic in find.images.all():
                jira_attachment(jira, issue, settings.MEDIA_ROOT + pic.image_large.name)
            issue.update(summary=find.title,
                         description=jira_long_description(find.long_desc(), find.id, jira_conf.finding_text),
                         priority={'name': jira_conf.get_priority(find.severity)},
                         fields=fields)
            #Add labels(security & product)
            add_labels(find, issue)
        except JIRAError as e:
            log_jira_alert(e.text, find)
        # Drive the workflow transition directly through the REST API when
        # the finding switched between open and closed states.
        req_url =jira_conf.url+'/rest/api/latest/issue/'+ j_issue.jira_id+'/transitions'
        if 'Inactive' in find.status() or 'Mitigated' in find.status() or 'False Positive' in find.status() or 'Out of Scope' in find.status() or 'Duplicate' in find.status():
            if 'Active' in old_status:
                json_data = {'transition':{'id':jira_conf.close_status_key}}
                r = requests.post(url=req_url, auth=HTTPBasicAuth(jira_conf.username, jira_conf.password), json=json_data)
        elif 'Active' in find.status() and 'Verified' in find.status():
            if 'Inactive' in old_status:
                json_data = {'transition':{'id':jira_conf.open_status_key}}
                r = requests.post(url=req_url, auth=HTTPBasicAuth(jira_conf.username, jira_conf.password), json=json_data)
def integrate_all_old(jira_login, enc_jira_password):
    """ Add comment to all relevant historical JIRA tickets """
    jira = JIRA(options={'server': 'https://jira.genesys.com'},
                basic_auth=(jira_login, decryption(enc_jira_password)))
    # Base URL for linking a changeset in the hg web viewer.
    link_hgweb_static = app.config["HG_PROD"] + "/rev/"
    reviews = Review.query.filter(Review.status == "MERGED").all()
    for review in reviews:
        changeset = review.changesets[0]
        # Every JIRA key found in the changeset title gets a comment, unless
        # a comment for this sha1 is already present on the issue.
        for token in token_search(changeset.title):
            try:
                issue = jira.issue(token)
                if not comment_added(changeset.sha1, issue.fields.comment.comments):
                    link_hgweb = link_hgweb_static + changeset.sha1
                    jira_comment(jira, token, changeset.owner, changeset.created_date, 'IWD',
                                 changeset.review.target, link_hgweb, changeset.review_id, changeset.title)
            except JIRAError as e:
                if e.status_code == 404:
                    print "Issue does not exist (url: {url})".format(url=e.url)
# Report minutes/cost of Ops time logged against each open CL ticket, by
# summing timespent on linked OP-* issues.
jira = JIRA(jira_url, basic_auth=(username, password))
query = 'project = CL AND status != Done and status != Rejected'
num = 0
while True:
    # Page through results 500 at a time.
    query_result = jira.search_issues(query, maxResults=500, startAt=num)
    # BUG FIX: the original broke as soon as startAt passed the reported
    # total, which silently skipped the final partial page of issues.
    # Stop only when a page comes back empty.
    if len(query_result) == 0:
        break
    num += 500
    for issue in query_result:
        issue_timespent = 0
        issue_count = 0
        # Sum the time logged on linked Ops (OP-*) tickets.
        for link_id in issue.fields.issuelinks:
            inward_issue = jira.issue_link(link_id).inwardIssue
            if inward_issue.key.startswith('OP-'):
                issue_count += 1
                # Renamed from `time` to avoid shadowing the time module.
                spent = jira.issue(inward_issue.key).fields.timespent
                if spent is not None:
                    issue_timespent += spent
        total_time = issue_timespent / 60  # seconds -> minutes
        # $65 per hour for Ops
        total_money = round(total_time / 60 * 65, 2)
        if issue_timespent != 0:
            print("%s: %s minutes, %s linked tickets, %s dollars, %s" % (
                issue.key, total_time, issue_count, total_money, issue.fields.summary))
def update_tickets_from_git(last=None, current=None):
    """
    Run during a deployment.
    Looks at all commits between now and the last deployment.
    Finds all ticket numbers and updates their status in Jira.
    """
    from jira import JIRA, JIRAError
    from burlap.deploy import get_last_current_diffs
    from burlap.git import gittracker

    get_current_commit = gittracker.get_current_commit
    GITTRACKER = gittracker.name.upper()

    dryrun = common.get_dryrun()
    verbose = common.get_verbose()

    # Ensure this is only run once per role.
    if env.host_string != env.hosts[-1]:
        return
    if not env.jira_update_from_git:
        return
    if not env.jira_ticket_pattern:
        return
    if not env.jira_basic_auth_username or not env.jira_basic_auth_password:
        return

    # During a deployment, we should be given these, but for testing,
    # lookup the diffs dynamically.
    if not last or not current:
        last, current = get_last_current_diffs(GITTRACKER)
    if verbose:
        print('-' * 80)
        print('last.keys:', last.keys())
        print('-' * 80)
        print('current.keys:', current.keys())

    # BUG FIX: the original indexed the diffs with the literal string
    # 'GITTRACKER' instead of the computed component name above, so the
    # lookup always missed (KeyError for `last`, crash for `current`).
    try:
        last_commit = last[GITTRACKER]['current_commit']
    except KeyError:
        return
    current_commit = current[GITTRACKER]['current_commit']

    # Find all tickets deployed between last deployment and now.
    tickets = get_tickets_between_commits(current_commit, last_commit)
    if verbose:
        print('tickets:', tickets)

    # Update all tickets in Jira.
    jira = JIRA({'server': env.jira_server},
                basic_auth=(env.jira_basic_auth_username, env.jira_basic_auth_password))
    for ticket in tickets:
        # Mention this Jira updated.
        comment = env.jira_ticket_update_message_template % dict(role=env.ROLE.lower())
        print('Commenting on ticket %s: %s' % (ticket, comment))
        if not dryrun:
            jira.add_comment(ticket, comment)

        # Walk the ticket through the deploy workflow until no further
        # transition applies.
        recheck = False
        while 1:
            print('Looking up jira ticket %s...' % ticket)
            issue = jira.issue(ticket)
            print('Ticket %s retrieved.' % ticket)
            transition_to_id = dict(
                (t['name'], t['id']) for t in jira.transitions(issue))
            print('%i allowable transitions found: %s' % (
                len(transition_to_id), ', '.join(transition_to_id.keys())))
            next_transition_name = env.jira_deploy_workflow.get(
                issue.fields.status.name.title())
            next_transition_id = transition_to_id.get(next_transition_name)
            if next_transition_name:
                new_assignee = env.jira_assignee_by_status.get(
                    next_transition_name,
                    issue.fields.assignee.name,
                )
                if new_assignee == 'reporter':
                    new_assignee = issue.fields.reporter.name
                print('Updating ticket %s to status %s and assigning it to %s.'
                      % (ticket, next_transition_name, new_assignee))
                if not dryrun:
                    try:
                        jira.transition_issue(issue, next_transition_id)
                        recheck = True
                    except AttributeError as e:
                        print('Unable to transition ticket %s to %s: %s'
                              % (ticket, next_transition_name, e), file=sys.stderr)
                    # Note assignment should happen after transition, since the assignment may
                    # effect remove transitions that we need.
                    try:
                        if new_assignee:
                            print('Assigning ticket %s to %s.' % (ticket, new_assignee))
                            jira.assign_issue(issue, new_assignee)
                        else:
                            print('No new assignee found.')
                    except JIRAError as e:
                        print('Unable to reassign ticket %s to %s: %s'
                              % (ticket, new_assignee, e), file=sys.stderr)
            else:
                recheck = False
                print('No transitions found for ticket %s currently in status "%s".'
                      % (ticket, issue.fields.status.name))
            if not recheck:
                break
# Page through on-premise JIRA issues and copy their test-environment custom
# field to the matching JIRA Cloud (Xray) issue via a GraphQL mutation.
# NOTE(review): this fragment starts mid-script (block_num, mutation,
# headers, etc. are defined earlier) and is truncated at the end.
keyStats = []
timeStats = []
hasPushStats = []
count = 0
while True:
    start_idx = block_num * block_size
    issues = jira.search_issues(GLOBAL_jqlExec, start_idx, block_size)
    if len(issues) == 0:
        # Retrieve issues until there are no more to come
        break
    block_num += 1
    for issue in issues:
        startExec = time.time()
        if not ignoreList or (ignoreList and issue.key not in ignoreList):
            logging.info(issue.key + " " + issue.fields.summary)
            # Fetch only the test-environments custom field from Cloud.
            cloudIssue = jiraCloud.issue(issue.key, fields="\"" + GLOBAL_testEnvironmentsId + "\"")
            onPremiseEnvType = getattr(issue.fields, "customfield_" + GLOBAL_testEnvironmentsId)
            # Normalize quoting so the value can be embedded in GraphQL.
            onPremiseEnvType = str(onPremiseEnvType).replace('\'', '\"')
            if commandExecutionExceptions.export != None:
                issuesScanned.append(issue.key)
            if onPremiseEnvType != "[]":
                mutation = addTestEnvironmentsToTestExecution(headers, mutation, cloudIssue.id, onPremiseEnvType)
            if commandExecutionExceptions.export != None and mutation == "":
                ## Limit of mutation items has been reach and was push when adding to mutation
                for item in issuesScanned:
                    fileExport.write("%s\n" % item)
                issuesScanned.clear()
                hasPushStats.append("1")
            else:
                hasPushStats.append("0")
        else:
            # NOTE(review): SOURCE is truncated here -- the body of this
            # branch is not visible in this chunk.
            pass
class jira(object):
    """JIRA client wrapper that throttles every call with DEFAULT_DELAY and
    retries transient failures after a 10-second sleep (by recursing)."""

    def __init__(self, server, ID, PW, LOG=None):
        self.jira = JIRA(SERVER[server], basic_auth=(ID, PW))
        # Fall back to print() when no logger object is supplied.
        if LOG == None:
            self.LOG = print
        else:
            self.LOG = LOG.info

    def searchIssues(self, filter):
        # Caution: when the filter is empty, JIRA ends up searching ALL issues.
        time.sleep(DEFAULT_DELAY)
        # if 'None' in filter:
        #     return None
        if 'None' in filter:
            self.LOG('필터없음')  # "no filter"
            # print("no filter")
            return None
        try:
            self.LOG('서치 시작')  # "search started"
            # print("search started")
            # Default page size is 50; raising it above 400 seems to error.
            issues = self.jira.search_issues(filter, maxResults=400)
        except Exception:
            self.LOG('searchIssues restart')
            # print('searchIssues restart')
            time.sleep(10)
            return self.searchIssues(filter)
        return issues

    def findIssue(self, issueNo):
        # Fetch a single issue, retrying indefinitely on any error.
        time.sleep(DEFAULT_DELAY)
        try:
            issue = self.jira.issue(issueNo)
        except:
            time.sleep(10)
            return self.findIssue(issueNo)
        return issue

    def addComment(self, issue, cmt):
        time.sleep(DEFAULT_DELAY)
        try:
            self.jira.add_comment(issue, cmt)
            # issue.update(notify=False, comment=cmt)
            # pass
        except:
            time.sleep(10)
            self.addComment(issue, cmt)
            # pass

    def uploadComment(self, issue, cmt, no=0):
        # Overwrite the body of the (last - no)-th comment on the issue.
        comments = issue.fields.comment.comments
        if len(comments) > 0:
            time.sleep(DEFAULT_DELAY)
            try:
                comments[len(comments) - 1 - no].update(body=cmt)
            except Exception as e:
                # "An error occurred; may be a comment edit-permission problem."
                self.LOG('error 발생, comment 수정권한 문제일 수 있습니다.', e)
        else:
            # "This issue has no comments."
            self.LOG('{0}이슈에는 comment가 없습니다.'.format(issue.key))
            return None

    def deleteComment(self, issue, no=0):
        # Delete the (last - no)-th comment on the issue.
        comments = issue.fields.comment.comments
        if len(comments) > 0:
            time.sleep(DEFAULT_DELAY)
            try:
                comments[len(comments) - 1 - no].delete()
            except Exception as e:
                # "An error occurred; may be a comment edit-permission problem."
                self.LOG('error 발생, comment 수정권한 문제일 수 있습니다.', e)
        else:
            # "There are no comments."
            self.LOG('{0}comment가 없습니다.'.format(issue.key))
            return None

    def readComment(self, issue, no=0):
        # `no` counts back from the last comment: 0 means the last comment;
        # a negative value returns the whole comment list.
        comments = issue.fields.comment.comments
        if len(comments) > 0:
            if no < 0:
                return comments
            else:
                return comments[len(comments) - 1 - no]
        else:
            # "This issue has no comments."
            self.LOG('{0}이슈에는 comment가 없습니다.'.format(issue.key))
            return None

    def getAssignee(self, issue):
        # Re-fetch only the assignee field; retries on any error.
        time.sleep(DEFAULT_DELAY)
        try:
            assignee = self.jira.issue(issue.key, fields='assignee').fields.assignee
        except:
            time.sleep(10)
            return self.getAssignee(issue)
        return assignee

    def getReporter(self, issue):
        # Re-fetch only the reporter field; retries on any error.
        time.sleep(DEFAULT_DELAY)
        try:
            reporter = self.jira.issue(issue.key, fields='reporter').fields.reporter
        except:
            time.sleep(10)
            return self.getReporter(issue)
        return reporter
class JiraAPI:
    """Helpers for building a daily stand-up summary from JIRA worklogs."""

    def __init__(self, server=None, login=None, password=None, jira=None, userProjects=[], boardStatuses=[]):
        # NOTE(review): mutable default arguments ([]) are shared across
        # instances -- confirm callers always pass fresh lists.
        self.server = server
        self.login = login
        self.password = password
        self.jira = jira
        self.userProjects = userProjects
        self.boardStatuses = boardStatuses

    def get_projects_with_yesterday_worklogs(self, project):
        # Record the project if the current user logged work on it yesterday.
        issues = self.jira.search_issues('worklogAuthor = currentUser() AND project={key} AND worklogDate = {yesterday}'.format(key=project.key, yesterday=self.get_yesterday()))
        if (issues != []):
            self.userProjects.append({'key': project.key, 'name': project.name})
        return self.userProjects

    def get_server_board_statuses(self):
        self.boardStatuses = self.jira.statuses()
        return self.boardStatuses

    def get_info_user_projects(self):
        # One thread per project; threads are started but never joined, so
        # results may still be arriving when this method returns.
        threads = []
        allProjects = self.jira.projects()
        for project in allProjects:
            threads.append(threading.Thread(target=self.get_projects_with_yesterday_worklogs, args=(project,)))
        for thread in threads:
            thread.start()

    def auth_in_jira(self, login, password):
        # Authenticate and eagerly warm the status/project caches.
        self.jira = JIRA(basic_auth=('{0}'.format(login), '{0}'.format(password)), options={'server': '{0}'.format(self.server)}, max_retries=0)
        self.get_server_board_statuses()
        self.get_info_user_projects()

    def set_server(self, server):
        self.server = server
        return server

    def get_server(self):
        return self.server

    def get_worklogs(self, issues):
        # Re-fetch each issue by key. NOTE(review): despite the name, this
        # returns full issue objects, not worklog objects.
        worklogs = []
        for issue in issues:
            worklogs.append(self.jira.issue(str(issue.key)))
        return worklogs

    def handler_worklogs(self, worklogs):
        # Concatenate "comment(timeSpent)" pairs for entries created yesterday.
        yesterday = self.get_yesterday()
        message = ''
        for worklog in worklogs:
            # Compare on the date prefix (YYYY-MM-DD) of the created stamp.
            if yesterday == worklog.created[0:10]:
                message += '{0}({1}). \n'.format(worklog.comment.strip(), worklog.timeSpent)
        return message

    def get_yesterday(self):
        # Previous working day as an ISO date string (skips back over the
        # weekend: Monday -> Friday, Sunday -> Friday).
        today = datetime.date.today()
        if today.weekday() == 0:
            return str(today - datetime.timedelta(days=3))
        elif today.weekday() == 6:
            return str(today - datetime.timedelta(days=2))
        else:
            return str(today - datetime.timedelta(days=1))

    def get_yesterday_worklog_issues(self):
        yesterday_issues_worklogs = self.jira.search_issues('worklogAuthor = currentUser() AND worklogDate = "{yesterday}"'.format(yesterday=self.get_yesterday()))
        return self.get_worklogs(yesterday_issues_worklogs)

    def get_today_issues(self, data):
        today_issues = self.jira.search_issues('assignee = currentuser() AND project = "{project}" AND sprint in openSprints() AND worklogAuthor = currentUser() AND status in ({statuses})'.format(project=data['project'], statuses=data['statuses']))
        return today_issues

    def show_projects(self):
        return self.userProjects

    def get_projects(self):
        return self.userProjects

    def generate_standup(self, data):
        # Build the stand-up text: yesterday's worklogs plus today's issues.
        # (The template strings are Russian: "Good morning! / Yesterday /
        # Today / Problems / No problems!")
        issues_with_worklogs = self.get_yesterday_worklog_issues()
        today_issues_with_worklogs = self.get_today_issues(data)
        yesterday = ''
        today = ''
        for issue in issues_with_worklogs:
            yesterday += '- {0} {1}/browse/{2}\n'.format(self.handler_worklogs(issue.fields.worklog.worklogs), self.server, issue.key)
        for issue in today_issues_with_worklogs:
            today += '- {0} {1}/browse/{2}\n'.format(issue.fields.summary, self.server, issue.key)
        standup = 'Доброе утро!\n\n*Вчера*\n{0}\n*Сегодня*\n{1}\n\n*Проблемы*\n{2}'.format(yesterday, today, '- Нет проблем!')
        return standup
# NOTE(review): this chunk begins mid-expression -- the first lines are the
# tail of a Config field declaration whose start is not visible here.
        default='user',
        argparse_help='Facility code for the syslog.',
        argparse_names=['-f', '--facility'])


# Module-level test fixtures: connect to the JIRA instance described by the
# test config file and pre-fetch one issue per workflow state.
schema = Config()
config = Confiture.from_filename(
    'test_ressources/tests_jira_quart_config.conf', schema=schema).parse()
jira = JIRA(options={'server': config.subsection('jira').get('url')},
            basic_auth=(str(config.subsection('jira').get('user')),
                        str(config.subsection('jira').get('password'))))

#Test find_the_open_issue
#Get different issue types from Jira (needs regular updates
# for test to work)
canceled_issue = jira.issue('VUMA-511')
awaiting_validation = jira.issue('VUMA-512')
awaiting_fix = jira.issue('VUMA-513')
in_progress = jira.issue('VUMA-514')
more_info = jira.issue('VUMA-515')
done = jira.issue('VUMA-516')


@pytest.mark.parametrize(
    'issue_list, result',
    (([], None),
     ([canceled_issue], None),
     ([canceled_issue, awaiting_fix], awaiting_fix),
     ([awaiting_validation, canceled_issue], awaiting_validation),
     ([in_progress], in_progress),
     ([more_info], more_info),
     ([done], None)))
def test_find_the_open_issue(issue_list, result):
    # Each case: the first issue in the list that is still "open" (not
    # canceled/done) should be returned; None when there is none.
    assert quart.jira_quart.find_the_open_issue(issue_list) == result
def update_issue(find, old_status, push_to_jira):
    """Synchronise a Dojo finding with its linked JIRA issue.

    Updates summary/description/priority (and the component, if not already
    set on the JIRA side), re-uploads finding screenshots, and then fires a
    status transition over the raw REST API when the finding's status crossed
    the open/closed boundary.

    :param find: the Finding to push
    :param old_status: the finding's previous status string(s), used to decide
        whether a JIRA transition is needed
    :param push_to_jira: when falsy, the issue-field update is skipped entirely
        # NOTE(review): the transition block below appears to sit inside the
        # same guard; it uses j_issue, which is only fetched when push_to_jira
        # is truthy -- confirm intended scoping.
    """
    prod = Product.objects.get(engagement=Engagement.objects.get(
        test=find.test))
    jpkey = JIRA_PKey.objects.get(product=prod)
    jira_conf = jpkey.conf
    if push_to_jira:
        j_issue = JIRA_Issue.objects.get(finding=find)
        try:
            # Keep JIRAError from writing its payload to a temp file.
            JIRAError.log_to_tempfile = False
            jira = JIRA(server=jira_conf.url,
                        basic_auth=(jira_conf.username, jira_conf.password))
            issue = jira.issue(j_issue.jira_id)
            fields = {}
            # Only update the component if it didn't exist earlier in Jira,
            # this is to avoid assigning multiple components to an item
            if issue.fields.components:
                log_jira_alert(
                    "Component not updated, exists in Jira already. Update from Jira instead.",
                    find)
            else:
                # Add component to the Jira issue
                component = [
                    {
                        'name': jpkey.component
                    },
                ]
                fields = {"components": component}
            # Upload dojo finding screenshots to Jira
            for pic in find.images.all():
                jira_attachment(jira, issue,
                                settings.MEDIA_ROOT + pic.image_large.name)
            issue.update(
                summary=find.title,
                description=jira_long_description(find.long_desc(), find.id,
                                                  jira_conf.finding_text),
                priority={'name': jira_conf.get_priority(find.severity)},
                fields=fields)
            # Add labels(security & product)
            add_labels(find, issue)
        except JIRAError as e:
            log_jira_alert(e.text, find)
        # Drive the open/close transition through the raw REST endpoint
        # (python-jira's transition helper is not used here).
        req_url = jira_conf.url + '/rest/api/latest/issue/' + j_issue.jira_id + '/transitions'
        if 'Inactive' in find.status() or 'Mitigated' in find.status(
        ) or 'False Positive' in find.status(
        ) or 'Out of Scope' in find.status() or 'Duplicate' in find.status():
            # Finding moved from Active to a closed-like state -> close in JIRA.
            if 'Active' in old_status:
                json_data = {'transition': {'id': jira_conf.close_status_key}}
                # NOTE(review): the response `r` is never checked, so a failed
                # transition goes unnoticed -- confirm whether that is intended.
                r = requests.post(url=req_url,
                                  auth=HTTPBasicAuth(jira_conf.username,
                                                     jira_conf.password),
                                  json=json_data)
        elif 'Active' in find.status() and 'Verified' in find.status():
            # Finding re-activated -> reopen in JIRA.
            if 'Inactive' in old_status:
                json_data = {'transition': {'id': jira_conf.open_status_key}}
                r = requests.post(url=req_url,
                                  auth=HTTPBasicAuth(jira_conf.username,
                                                     jira_conf.password),
                                  json=json_data)
class TestIssues(unittest.TestCase):
    """Integration tests for the `topcli issue` CLI sub-commands.

    Each test runs the CLI via click's CliRunner against a live JIRA test
    instance (TEST_URL) and then verifies the effect through a direct
    python-jira client.  Requires a reachable server and a 'KB' project.
    """

    def setUp(self):
        # Fresh client + two throwaway Bug issues per test.
        self.jira = JIRA(options=dict(server=TEST_URL, verify=False),
                         basic_auth=(TEST_USERNAME, TEST_PASSWORD))
        self.issue1 = self.jira.create_issue(
            project='KB',
            summary='Test-1',
            issuetype={'name': 'Bug'},
        )
        self.issue2 = self.jira.create_issue(
            project='KB',
            summary='Test-2',
            issuetype={'name': 'Bug'},
        )

    def tearDown(self):
        # Remove every issue whose summary matches the Test* pattern so the
        # project stays clean between runs.
        issues = self.jira.search_issues('project = "KB" AND summary ~ "Test*"', fields=['key'])
        for _ in issues:
            _.delete()

    def assert_single_attachment(self):
        # TODO - Find how to test this automatically
        pass

    def assert_single_comment_with(self, text):
        """Assert issue1 has exactly one comment containing *text*."""
        comments = self.jira.comments(self.issue1.key)
        self.assertEqual(len(comments), 1)
        self.assertIn(text, comments[0].body)

    def test_new(self):
        # `issue new` should create the issue and echo its key.
        result = CliRunner().invoke(topcli, ['issue', 'new', 'KB', 'task', 'Test-new'])
        self.assertEqual(result.exit_code, 0)
        issues = self.jira.search_issues('project = "KB" AND summary ~ "Test-new"', fields=['key', 'summary'])
        self.assertEqual(len(issues), 1)
        self.assertIn(issues[0].key, result.output)

    def test_transition(self):
        result = CliRunner().invoke(topcli, ['issue', 'transition', self.issue1.key, 'Done'])
        self.assertEqual(result.exit_code, 0)

    def test_assign(self):
        result = CliRunner().invoke(topcli, ['issue', 'assign', self.issue1.key, TEST_USERNAME])
        self.assertEqual(result.exit_code, 0)
        assignee = self.jira.issue(self.issue1.key, fields=['assignee']).fields.assignee
        self.assertEqual(assignee.key, TEST_USERNAME)

    def test_unassign(self):
        # Assign first so that unassigning has something to undo.
        result = CliRunner().invoke(topcli, ['issue', 'assign', self.issue1.key, TEST_USERNAME])
        result = CliRunner().invoke(topcli, ['issue', 'unassign', self.issue1.key])
        self.assertEqual(result.exit_code, 0)
        assignee = self.jira.issue(self.issue1.key, fields=['assignee']).fields.assignee
        self.assertIsNone(assignee)

    def test_attach_file(self):
        with CliRunner().isolated_filesystem() as dir_path:
            with open('data.txt', 'w') as f:
                print('abc', file=f)
            result = CliRunner().invoke(topcli,
                                        ['issue', 'attach', self.issue1.key, 'data.txt'])
            self.assertEqual(result.exit_code, 0)
            self.assert_single_attachment()

    def test_comment_args(self):
        # Multiple trailing args are joined into one comment body.
        result = CliRunner().invoke(topcli, ['issue', 'comment', self.issue1.key, 'Comment', 'from args'])
        self.assertEqual(result.exit_code, 0)
        self.assert_single_comment_with('Comment from args')

    def test_comment_file(self):
        with CliRunner().isolated_filesystem() as dir_path:
            with open('comment.txt', 'w') as f:
                print('Comment from file', file=f)
            result = CliRunner().invoke(topcli, ['issue', 'comment', self.issue1.key, 'comment.txt'])
            self.assertEqual(result.exit_code, 0)
            self.assert_single_comment_with('Comment from file')

    def test_comment_prompt(self):
        # With no comment argument the CLI prompts interactively.
        result = CliRunner().invoke(topcli, ['issue', 'comment', self.issue1.key],
                                    input='Comment from prompt\n')
        self.assertEqual(result.exit_code, 0)
        self.assert_single_comment_with('Comment from prompt')

    def test_comment_stdin(self):
        # '-' reads the comment body from stdin, newlines preserved.
        result = CliRunner().invoke(topcli, ['issue', 'comment', self.issue1.key, '-'],
                                    input='Comment\nfrom\nstdin')
        self.assertEqual(result.exit_code, 0)
        self.assert_single_comment_with('Comment\nfrom\nstdin')

    def test_link(self):
        result = CliRunner().invoke(topcli, ['issue', 'link', self.issue1.key, self.issue2.key, '-t', 'duplicates'])
        self.assertEqual(result.exit_code, 0)
        links = self.jira.issue(self.issue1.key, fields=['issuelinks']).fields.issuelinks
        self.assertEqual(len(links), 1)
        self.assertEqual(links[0].outwardIssue.key, self.issue2.key)
        self.assertEqual(links[0].type.outward, 'duplicates')

    def test_unlink(self):
        # Create a link, then remove it and verify none remain.
        result = CliRunner().invoke(topcli, ['issue', 'link', self.issue1.key, self.issue2.key, '-t', 'duplicates'])
        self.assertEqual(result.exit_code, 0)
        result = CliRunner().invoke(topcli, ['issue', 'unlink', self.issue1.key, self.issue2.key])
        links = self.jira.issue(self.issue1.key, fields=['issuelinks']).fields.issuelinks
        self.assertEqual(len(links), 0)

    def test_search_issue(self):
        # NOTE(review): asserts on fixed keys KB-1..KB-3 -- assumes those
        # issues pre-exist in the test project; confirm fixture state.
        result = CliRunner().invoke(topcli, ['issue', 'search'])
        self.assertEqual(result.exit_code, 0)
        self.assertIn('KB-1', result.output)
        self.assertIn('KB-2', result.output)
        self.assertIn('KB-3', result.output)
else : print("{} Build user {} is not in priority list".format(stime,jenkinsuser)) print("Please provide valid JiraTicketId") exit(0) #If build user is not in priority list else: #jira url server = 'https://example.atlassian.net' options = { 'server': server } jira = JIRA(options, basic_auth=(user,apikey)) #Jira Api authentication try: issue = jira.issue(JiraTicketId) #Fetch Ticket from ticket ID getassignee = issue.fields.assignee.displayName #Fetching assignee from ticket assign = unicodedata.normalize('NFKD', getassignee).encode('ascii','ignore') status = issue.fields.status ticketType = issue.fields.issuetype #Fetching Severity field by name allfields = jira.fields() nameMap = {jira.field['name']:jira.field['id'] for jira.field in allfields} getvalue = getattr(issue.fields, nameMap["Severity"]) except: print("{} Ticket ID is wrong or you don't have permission to see it.".format(stime)) #---------------fetching email of assignee ---------- def ExceljiraRead(): try: #mentioned excel should have only read permision in jenkin server
timeStats = [] count = 0 while True: start_idx = block_num * block_size issues = jira.search_issues(GLOBAL_jqlTestandPreCond, start_idx, block_size) if len(issues) == 0: # Retrieve issues until there are no more to come break block_num += 1 mutation = "" for issue in issues: startExec = time.time() logging.info(issue.key + " " + issue.fields.summary) try: cloudTest = jiraCloud.issue(issue.key) except JIRAError as e: logging.error("Failed to retrieve Issue " + issue.key + " from Jira Cloud connected returned with status " + str(e.status_code) + " and message " + e.text + " Skipping to next Issue ") continue r = requests.get( '' + GLOBAL_onPremiseURL + '/rest/raven/1.0/api/test/' + issue.key + '/preconditions', auth=HTTPBasicAuth(GLOBAL_basic_auth_user, GLOBAL_basic_auth_pass)) if r.text != "[]": json_data_pre = json.loads(r.text) for pre_cond in json_data_pre: composeKey = issue.key + ":" + pre_cond["key"] if not ignoreList or (ignoreList
timeStats = [] hasPushStats = [] count = 0 while True: start_idx = block_num*block_size issues = jira.search_issues(GLOBAL_jqlGeneric, start_idx,block_size) if len(issues) == 0: # Retrieve issues until there are no more to come break block_num += 1 for issue in issues: startExec = time.time() if not ignoreList or (ignoreList and issue.key not in ignoreList): logging.info(issue.key + " " + issue.fields.summary) try: cloudIssue = jiraCloud.issue(issue.key,fields="\"" + GLOBAL_testType +"\",\"" + GLOBAL_genericTestDefinition + "\"") except: logging.error("An error as occurred - Skipping " + issue.key +" Test") logging.error(sys.exc_info()) continue testType = getattr(issue.fields,"customfield_" + GLOBAL_testType) testType = testType.value testDefiniton = getattr(issue.fields,"customfield_" + GLOBAL_genericTestDefinition) if not testDefiniton: testDefiniton="" if commandExecutionExceptions.export != None: issuesScanned.append(issue.key) mutation = addToGenericTestUpdate(headers,cloudIssue.id,testType,testDefiniton,mutation,issue.key.replace("-","")) if commandExecutionExceptions.export != None and mutation == "": ## Limit of mutation items has been reach and was push when adding to mutation for item in issuesScanned:
print("LOG: 바로 2회 라벨링. 왜 1회 누락되었는지 점검해주세요!") elif delta > 6 & delta <= 13: issue_labels.append('Status_Summary1회미입력') print("LOG: 1회 라벨링") issue.update(fields={"labels": issue_labels}) print("LOG: ", str(issue_labels)) if __name__ == "__main__": # jira Handle open jira = JIRA(DevTracker, basic_auth=(ID, PASSWORD)) #filter=49494 : 1_webOS_5.0_Epic_OverDue_Initiative_pm # issue_search_convert = jira.search_issues("filter=49607", maxResults=1000, expand="changelog") issue_search_convert = jira.search_issues("filter=49520", maxResults=1000, expand='changelog') print("### status summary 업데이트 누락 이니셔티브 ###") # Create New Jira Tickets for key in issue_search_convert: issue = jira.issue(key) issue_key = issue.key issue_assignee = issue.fields.assignee.name # initiative Owner issue_summary = issue.fields.summary issue_changelog = issue.changelog print('{} : {} ({})'.format(issue_key, issue_summary, issue_assignee)) findStatusSummaryUpdate(issue, issue_changelog) # raw = "2019-05-12T10:11:11" # timeConvertor(raw)
class LogJammin:
    """Parse a plain-text worklog file and upload the entries to JIRA.

    The log file alternates date lines (YYYY-MM-DD) with time-log lines
    ("TICKET-123, 1h 30m, description"); parsing is driven by a small mode
    state machine ('date' / 'time_log' / 'date_or_time_log').
    """

    # NOTE(review): `logs` and `tickets` are mutable CLASS attributes, so they
    # are shared across instances.  Harmless for a one-shot CLI object, but
    # confirm no second instance is ever created.
    mode = 'date'            # current parser state
    current_date = None      # date the next time-log lines belong to
    parse_only = False       # when True, skip all JIRA interaction
    logs = []                # parsed worklog dicts
    tickets = []             # ticket keys already verified to exist
    jira = None              # JIRA client (None in parse-only mode)
    time_zone = None         # pytz timezone from config
    now = None               # localized "now", used to reject future dates

    def __init__(self, filename, parse_only):
        """Load config and logs, print a summary, then (optionally) upload.

        :param filename: log file path; falls back to config['log_file']
        :param parse_only: parse and summarise without touching JIRA
        """
        self.parse_only = parse_only
        try:
            config = self.load_config()
            self.time_zone = timezone(config['time_zone'])
            self.now = self.time_zone.localize(datetime.now())
            if not filename:
                if 'log_file' not in config or not config['log_file']:
                    raise Exception('Log file not set')
                filename = config['log_file']
            filename = realpath(expanduser(filename))
        except Exception as e:
            self.exit_with_error(e)
        if not self.parse_only:
            print('Connecting to JIRA...', end='', flush=True)
            try:
                self.jira = JIRA(server=config['host'],
                                 basic_auth=(config['user'], config['password']))
            except Exception as e:
                self.exit_with_error(e)
            print('\033[92mdone\033[0m')
        print('Loading logs...', end='', flush=True)
        try:
            self.load_logs(filename)
        except Exception as e:
            self.exit_with_error(e)
        print('\033[92mdone\033[0m')
        if not len(self.logs):
            self.exit_with_error('No logs found')
        self.print_summary()
        if not self.parse_only:
            # Require an explicit y/n confirmation before uploading.
            while True:
                run = input('Upload logs to JIRA? (y/n): ').lower().strip()
                if run == 'n':
                    self.exit_with_success()
                elif run == 'y':
                    break
            try:
                for (i, log) in enumerate(self.logs):
                    print('Saving log {}/{}: ({})...'.format(
                        i + 1, len(self.logs), self.format_log(log)),
                        end='', flush=True)
                    self.upload_log(log)
                    print('\033[92mdone\033[0m')
            except Exception as e:
                self.exit_with_error(e)
            self.exit_with_success()

    def print_summary(self):
        """Print a per-date breakdown of logs plus grand totals."""
        logs_by_date = OrderedDict()
        total_minutes = 0
        print('\033[94m{}\033[0m'.format(80 * '='))
        print('\033[93mSummary:\033[0m')
        # Group logs by calendar date, accumulating minutes per date.
        for log in self.logs:
            date = log['date'].strftime('%Y-%m-%d')
            if date not in logs_by_date:
                logs_by_date[date] = {'logs': [], 'total_time_minutes': 0}
            logs_by_date[date]['logs'].append(log)
            logs_by_date[date][
                'total_time_minutes'] += 60 * log['time']['hours']
            logs_by_date[date]['total_time_minutes'] += log['time']['minutes']
        for date, summary in logs_by_date.items():
            print('\n\033[93m{}\033[0m'.format(date))
            hours = math.floor(summary['total_time_minutes'] / 60)
            minutes = math.floor(summary['total_time_minutes'] % 60)
            total_minutes += summary['total_time_minutes']
            for log in summary['logs']:
                time = self.format_time(log['time']['hours'],
                                        log['time']['minutes'])
                description = '({})'.format(
                    log['description']) if log['description'] else ''
                print(' {}: {} {}'.format(log['ticket'], time, description))
            print('\033[93mTotal: {} logs, {}\033[0m'.format(
                len(summary['logs']), self.format_time(hours, minutes)))
        summary_hours = math.floor(total_minutes / 60)
        summary_minutes = math.floor(total_minutes % 60)
        print('\n\033[93mSum Total: {} days, {} logs, {}\033[0m'.format(
            len(logs_by_date), len(self.logs),
            self.format_time(summary_hours, summary_minutes)))
        print('\033[94m{}\033[0m'.format(80 * '='))

    def exit_with_success(self):
        print('\033[92mDone\033[0m')
        exit()

    def exit_with_error(self, e):
        # Red message on stderr-style formatting, non-zero exit code.
        print('\n\033[91m{}\033[0m'.format(str(e)))
        exit(1)

    def format_log(self, log):
        """One-line human-readable rendering of a parsed log entry."""
        return 'date={}, ticket={}, time={}, description={}'.format(
            log['date'].strftime('%Y-%m-%d'),
            log['ticket'],
            self.format_time(log['time']['hours'], log['time']['minutes']),
            log['description'])

    def format_time(self, hours, minutes):
        """Render hours/minutes as e.g. '2h 15m', omitting zero parts."""
        time_str = ''
        if hours:
            time_str += '{}h '.format(hours)
        if minutes:
            time_str += '{}m'.format(minutes)
        return time_str.strip()

    def load_config(self):
        """Read and validate JSON config from ~/.logjammin.

        :raises Exception: when the file is unreadable, not JSON, or missing
            any of the required keys.
        """
        try:
            required_keys = ['user', 'password', 'host', 'time_zone']
            with open(expanduser('~/.logjammin')) as f:
                config = json.load(f)
                for key in required_keys:
                    if key not in config:
                        raise Exception('missing key \'{}\''.format(key))
                return config
        except Exception as e:
            raise Exception(
                'Error parsing ~/.logjammin: {}'.format(e)) from None

    def load_logs(self, filename):
        """Parse the log file line by line, printing a percentage spinner.

        Blank lines and //- or #-prefixed comment lines are skipped; any
        parse failure is re-raised with the offending line number.
        """
        line_no = 0
        loading_pct = 0
        with open(filename, 'r') as fp:
            lines = fp.read().splitlines()
            for line in lines:
                line_no += 1
                stripped_line = line.strip()
                if not len(stripped_line):
                    continue
                if stripped_line.startswith('//') or stripped_line.startswith('#'):
                    continue
                try:
                    self.parse_line(stripped_line)
                except Exception as e:
                    raise Exception('Error on line {}: {}'.format(
                        line_no, str(e))) from None
                # Backspace over the previous percentage and print the new one.
                prev_loading_pct = loading_pct
                loading_pct = math.floor(line_no / len(lines) * 100)
                print('{}{}%'.format(
                    '\b' * (len(str(prev_loading_pct)) + 1
                            if prev_loading_pct else 0),
                    loading_pct), end='', flush=True)
            if len(lines):
                print('\b' * 4, end='', flush=True)  # 100%
        # Sort chronologically, then by project prefix, then by ticket number.
        self.logs.sort(key=lambda k: (k['date'], k['ticket'].split('-')[0],
                                      int(k['ticket'].split('-')[1])))

    def upload_log(self, log):
        """Push one parsed entry to JIRA as a worklog."""
        time_spent = '{}h {}m'.format(log['time']['hours'],
                                      log['time']['minutes'])
        kwargs = {'comment': log['description']} if log['description'] else {}
        self.jira.add_worklog(issue=log['ticket'], timeSpent=time_spent,
                              started=log['date'], **kwargs)

    def parse_line(self, line):
        """Dispatch a line to the date or time-log parser based on self.mode.

        In 'date_or_time_log' mode a date is attempted first, falling back to
        a time-log line (recursing with the mode temporarily switched).
        """
        normalized_line = re.sub(r'\s+', ' ', line.strip())
        if self.mode == 'date':
            try:
                self.current_date = self.parse_date_line(normalized_line)
                self.mode = 'time_log'
            except Exception as e:
                raise Exception('String \'{}\' is invalid: {}'.format(
                    line, str(e))) from None
        elif self.mode == 'time_log':
            try:
                ticket, time, description = self.parse_time_log_line(
                    normalized_line)
                self.add_log(ticket, time, description)
                self.mode = 'date_or_time_log'
            except Exception as e:
                raise Exception('String \'{}\' is invalid: {}'.format(
                    line, e)) from None
        elif self.mode == 'date_or_time_log':
            try:
                self.mode = 'date'
                return self.parse_line(line)
            except Exception as e:
                try:
                    self.mode = 'time_log'
                    return self.parse_line(line)
                except Exception as e:
                    raise Exception('String \'{}\' is invalid: {}'.format(
                        line, str(e))) from None
        else:
            raise Exception('Invalid mode \'{}\''.format(self.mode))

    def parse_date_line(self, line):
        """Parse 'YYYY-MM-DD' (dashes optional) into a localized datetime.

        :raises Exception: if the pattern doesn't match or the date is in
            the future.
        """
        date_match = re.match(
            r'^(?P<year>\d{4})-?(?P<month>\d{2})-?(?P<day>\d{2})$', line)
        if not date_match:
            raise Exception('Pattern not matched')
        date = self.time_zone.localize(
            datetime(int(date_match.group('year')),
                     int(date_match.group('month')),
                     int(date_match.group('day'))))
        if date > self.now:
            raise Exception('Date is in the future')
        return date

    def parse_time_log_line(self, line):
        """Parse 'TICKET, time[, description]' into (ticket, (h, m), desc).

        Time accepts decimal hours ('1.5', '1.5h') or 'XhYm' components.
        Unless parse_only, also verifies the ticket exists in JIRA.
        """
        parts = line.split(',', 2)
        ticket_str = parts[0].strip() if len(parts) else ''
        time_str = parts[1].strip() if len(parts) > 1 else ''
        description = parts[2].strip() if len(parts) > 2 else ''
        ticket_match_re = r'^[A-Z][A-Z0-9]+-\d+$'
        ticket_match = re.match(ticket_match_re, ticket_str, re.IGNORECASE)
        if not ticket_match:
            raise Exception('Ticket pattern not matched')
        ticket = ticket_match.group(0).upper()
        hours = 0
        minutes = 0
        # Decimal-hours form, e.g. '1.5' or '1.5H'.
        dec_hours_match_re = '^(\d+\.\d+|\.\d+|\d+)\s*H?$'
        dec_hours_match = re.match(dec_hours_match_re, time_str, re.IGNORECASE)
        if dec_hours_match:
            dec_hours = float(dec_hours_match.group(1))
            hours = math.floor(dec_hours)
            minutes = math.floor(60 * (dec_hours % 1))
        else:
            # Component form, e.g. '1h 30m', '45m', '2h'.
            hours_mins_match_re = r'((?P<hours>\d+)\s*H)?\s*((?P<minutes>\d+)\s*M)?'
            hours_mins_match = re.match(hours_mins_match_re, time_str,
                                        re.IGNORECASE)
            if hours_mins_match:
                hours = int(hours_mins_match.group('hours') or 0)
                minutes = int(hours_mins_match.group('minutes') or 0)
        if not hours and not minutes:
            raise Exception('Time pattern not matched')
        if not self.parse_only:
            self.assert_ticket_exists(ticket)
        time = (hours, minutes)
        return (ticket, time, description)

    def assert_ticket_exists(self, ticket):
        """Verify *ticket* exists in JIRA once; cache positive results."""
        if ticket in self.tickets:
            return
        try:
            self.jira.issue(ticket, fields='key')
            self.tickets.append(ticket)
        except Exception as e:
            raise Exception(
                'Failed to get ticket info for {}'.format(ticket)) from None

    def add_log(self, ticket, time, description):
        """Append a parsed entry under the most recent date line."""
        self.logs.append({
            'date': self.current_date,
            'ticket': ticket,
            'description': description,
            'time': {
                'hours': time[0],
                'minutes': time[1]
            }
        })
# Initialize the final list final_list = [] """ Get all jiras for the given query Go thru each jira and grab epic name and initiative name, if available customfield_10004 is epic link customfield_10006 is epic name customfield_11400.data.summary is initiative name This code will work even if there are no initiatives in your JIRA instance. (Initiatives are available when you enable JIRA Portfolio) """ for key in issues_in_sprint: issue = jira.issue(key) issue_key = issue.key assignee = "Unassigned" if issue.fields.assignee is not None: assignee = issue.fields.assignee.displayName issue_type = issue.fields.issuetype.name issue_summary = issue.fields.summary issue_status = issue.fields.status.name var_list = [] var_list.append(issue_key) var_list.append(assignee) var_list.append(issue_type) var_list.append(issue_summary)
class JiraReader:
    """Read-only helper around a JIRA server.

    Fetches issues, their discussion comments, and any ``.patch``
    attachments, applying the configured author/content filters.
    """

    def __init__(self, url, re_author, re_remove):
        """Connect to the JIRA instance at *url*.

        :param url: base URL of the JIRA server
        :param re_author: regex; matching comment authors are recorded by
            name only (their comment bodies are dropped)
        :param re_remove: regex of boilerplate stripped from comment bodies
        """
        self._jira = JIRA(url)
        self._re_author = re_author
        self._re_remove = re_remove

    def query_issue(self, issue_id):
        """Fetch one issue plus its processed comments and attachments.

        :param issue_id: JIRA issue key, e.g. 'PROJ-123'
        :return: ``(issue, (comments, attachments))``
        """
        fetched = self._jira.issue(issue_id)
        processed = self._comment_and_attachment_process(issue=fetched)
        return fetched, processed

    def _comment_and_attachment_process(self, issue):
        """Extract the comment list and the ``.patch`` attachments of *issue*.

        :return: ``(comments, attachments)`` where each comment is either a
            plain author-name string (filtered authors) or a
            CommentInDiscussion.
        """
        comments = []
        attachments = []
        for comment in issue.fields.comment.comments:
            author = str(comment.author)
            if re.match(self._re_author, author) is None:
                # Regular comment: strip boilerplate and keep full detail.
                body = re.sub(self._re_remove, '', comment.body.strip(),
                              flags=re.DOTALL)
                comments.append(CommentInDiscussion(author=author,
                                                    date=str(comment.created),
                                                    content=body))
            else:
                # Filtered author (e.g. a bot): record only the name.
                comments.append(author)
        for att in issue.fields.attachment:
            name = str(att.filename)
            # Only patch files are of interest here.
            if name.endswith('.patch'):
                attachments.append(Attachment(author=str(att.author),
                                              date=str(att.created),
                                              content=str(att.content),
                                              filename=name))
        return comments, attachments

    def query_issue_ids_with_index(self, project_key, start_idx, block_size):
        """Return the keys of one page of issues in *project_key*.

        :param start_idx: index of the first issue of the page
        :param block_size: maximum number of issues to fetch
        """
        page = self._jira.search_issues('project=' + project_key,
                                        start_idx, block_size)
        return [found.key for found in page]
def delete_issue(request, find):
    """Delete the JIRA issue linked to a Dojo finding.

    :param request: HTTP request (not used in the body; presumably kept for
        the view signature)
    :param find: Finding whose linked JIRA issue should be removed
    """
    j_issue = JIRA_Issue.objects.get(finding=find)
    # NOTE(review): `jira_conf` is not defined in this function or its
    # parameters -- presumably it must be resolved from the finding's product
    # (as update_issue does) or exist at module level. Confirm before relying
    # on this code path.
    jira = JIRA(server=jira_conf.url,
                basic_auth=(jira_conf.username, jira_conf.password))
    issue = jira.issue(j_issue.jira_id)
    issue.delete()
class JiraProject:
    """ Client in charge to retrieve all issues and comments for a given JIRA project """

    def __init__(self, jira_url, project_key):
        """Open an anonymous, non-validated connection to *jira_url*.

        :param jira_url: base URL of the JIRA server
        :param project_key: key of the project to read, e.g. 'HADOOP'
        """
        self.jira_url = jira_url
        # verify=False / validate=False: skip TLS verification and the
        # session-validation round trip.
        self.jira_client = JIRA(
            options={'server': self.jira_url, 'verify': False},
            validate=False)
        self.project_key = project_key

    def get_comments(self, issue):
        # Re-fetches the issue with comments expanded rather than returning
        # the comment objects directly.
        return self.jira_client.issue(issue.key, expand='comments')

    def get_project_versions(self):
        return self.jira_client.project_versions(self.project_key)

    @staticmethod
    def get_attachments(issue):
        """Return the attachment list, or None when the field is absent."""
        try:
            return issue.fields.attachment
        except AttributeError:
            return None

    @staticmethod
    def get_assignee(issue):
        """Return the assignee's username, or None when unassigned."""
        try:
            return issue.fields.assignee.name
        except AttributeError:
            return None

    @staticmethod
    def get_creation_datetime(issue):
        return issue.fields.created

    @staticmethod
    def get_fix_version(issue):
        """Return the FIRST fix version's name, or None when there is none."""
        try:
            fix_versions = issue.fields.fixVersions
            if len(fix_versions) > 0:
                return fix_versions[0].name
            else:
                return None
        except AttributeError:
            return None

    @staticmethod
    def get_priority(issue):
        try:
            return issue.fields.priority.name
        except AttributeError:
            return None

    @staticmethod
    def get_resolution(issue):
        if issue.fields.resolution is not None:
            return issue.fields.resolution.name
        else:
            return None

    @staticmethod
    def get_title(issue):
        return issue.fields.summary

    @staticmethod
    def get_type(issue):
        return issue.fields.issuetype.name

    @staticmethod
    def is_closed(issue):
        # An issue is considered closed once it carries a resolution.
        return issue.fields.resolution is not None

    def get_issues(self):
        """Page through ALL issues of the project, 100 at a time.

        :return: issues sorted by their numeric key suffix (e.g. the 42 in
            'PROJ-42')
        """
        start_index = 0
        max_nb_results = 100
        result = []
        # while start_index < max_nb_results:
        while True:
            issues = self.jira_client.search_issues(
                'project=' + self.project_key,
                startAt=start_index,
                maxResults=max_nb_results)
            result.extend(issues)
            # A short (or empty) page means we've reached the end.
            if len(issues) == 0 or len(issues) < max_nb_results:
                break
            else:
                start_index += max_nb_results
        return sorted(result, key=lambda issue: int(
            issue.key[issue.key.index('-') + 1:]))

    def get_attachment_information(self):
        """Collect (issue_key, attachment_id) pairs for the whole project.

        Pages through the project fetching only the attachment field.
        """
        start_index = 0
        max_nb_results = 100
        result = []
        # while start_index < max_nb_results \
        while True:
            issues = self.jira_client.search_issues(
                'project=' + self.project_key,
                fields='attachment',
                startAt=start_index,
                maxResults=max_nb_results)
            for issue in issues:
                a = self.get_attachments(issue)
                if a is not None and len(a) > 0:
                    # NOTE(review): list comprehension used purely for its
                    # side effect of appending.
                    [result.append((issue.key, v.id)) for v in a]
            if len(issues) == 0 or len(issues) < max_nb_results:
                break
            else:
                start_index += max_nb_results
        return result

    def get_attachment(self, attachment_id):
        return self.jira_client.attachment(attachment_id)
break if(version_exists): print('version ' + release_version + ' in project ' + project + ' exists - dont create one\n') else: print('version ' + release_version + ' in project ' + project + ' not found - creating it!\n') version = jira.create_version(release_version, project_version) issues = [] added = [] bugs = [] issues = scan_for_tickets() for issueCode in issues: try: issue = jira.issue(issueCode) except JIRAError as e: print(issueCode + "not found") set_fixVersions(issue, version) if issue.fields.issuetype.name in bugTypes: bugs.append(issue) elif issue.fields.issuetype.name in ignoredTypes: # This issue is of a type that we want to ignore; continue with the next one. continue elif issue.fields.issuetype.name in featureTypes: added.append(issue) else: added.append(issue) changelogHeading = "## [" + release_version + "] " + buildType + " " + props['buildNumber'] + " - " + datetime.today().strftime("%Y-%m-%d") + "\n" changelog = ""
class JiraTicket:
    """Track JIRA tickets per reporter and classify them as new, updated,
    closed, or stale ("needs update").

    State is four dicts keyed by ticket id; the previous snapshot is
    persisted with pickle between runs.
    """

    def __init__(self):
        # this list is where you extend, don't touch anything else
        self.employees = [
            employee1, employee2
            # extend here
        ]
        self.new_tickets = {}
        self.old_tickets = {}
        self.closed_tickets = {}
        self.updated_tickets = {}
        self.needs_update = {}
        jira_options = {'server': server, 'verify': False}
        self.jira = JIRA(options=jira_options,
                         basic_auth=('username', 'password'))

    def get_unix_timestamp(self, issue):
        """Return the issue's `updated` field as an integer Unix timestamp.

        `updated` looks like '2021-03-05T10:11:12.000+0000'; the fractional
        seconds and offset are dropped, keeping 'YYYY-MM-DD HH:MM:SS'.

        NOTE(review): '%s' is a platform-specific strftime extension and
        interprets the naive datetime in the LOCAL timezone.
        """
        split = issue.fields.updated.split('T')
        split1 = split[1].split('.')
        created = split[0] + " " + split1[0]
        # FIX: previously returned strftime('%s') as a str, followed by an
        # unreachable `return int(unix_t)` that referenced an undefined name.
        return int(
            (datetime.strptime(created, '%Y-%m-%d %H:%M:%S')).strftime('%s'))

    def get_new_tickets(self, username):
        """Populate self.new_tickets with *username*'s currently open tickets.

        Each entry records the comment count, the time of this snapshot, and
        the latest comment's author (falling back to the reporter when the
        ticket has no comments yet).
        """
        JQL = "reporter=" + username + " and status in ('In Progress', open, reopened)" + " order by created desc"
        JQL_query = self.jira.search_issues(JQL, maxResults=50)
        for item in JQL_query:
            issue = self.jira.issue(str(item))
            comments = self.jira.comments(str(item))
            # (FIX: dropped the redundant pre-assignment the original flagged
            # with "TODO: don't need this".)
            self.new_tickets[str(item)] = {
                'commentCount': len(issue.fields.comment.comments),
                'timestamp': int(datetime.now().strftime('%s')),
                'latestCommentAuthor': None
            }
            if self.new_tickets[str(item)]['commentCount'] > 0:
                self.new_tickets[str(item)]['latestCommentAuthor'] = comments[
                    -1].raw['updateAuthor']['displayName']
            else:
                self.new_tickets[str(item)]['latestCommentAuthor'] = str(
                    issue.fields.reporter)

    def write_dict_to_file(self, dict):
        """Persist the given snapshot to the pickle file and its backup."""
        pickle.dump(dict, open("old_jira_tickets.p", "wb"))
        pickle.dump(dict, open("backup_old_jira_tickets.p", "wb"))

    def load_dict_from_file(self):
        """Load the previous snapshot, falling back to the backup copy."""
        try:
            self.old_tickets = pickle.load(open("old_jira_tickets.p", "rb"))
        except Exception:
            # FIX: was a bare `except:` (also caught KeyboardInterrupt etc.).
            self.old_tickets = pickle.load(
                open("backup_old_jira_tickets.p", "rb"))

    def compare_old_to_new(self):
        """Diff the previous snapshot against the fresh one.

        Moves tickets into updated/closed/needs_update buckets, updates
        old_tickets in place, and finally registers brand-new tickets.
        """
        to_delete_from_old = []
        to_delete_from_new = []
        for key in self.old_tickets:
            old_issue = self.old_tickets[key]
            # check for updated tickets
            if key in self.new_tickets.keys():
                if old_issue['commentCount'] == self.new_tickets[key][
                        'commentCount']:
                    # Unchanged for more than a week -> flag as stale.
                    if (int(datetime.now().strftime('%s')) -
                            int(old_issue['timestamp'])) > 7 * 24 * 60 * 60:
                        self.needs_update[key] = {}
                        old_issue['timestamp'] = self.new_tickets[key][
                            'timestamp']
                        if old_issue['commentCount'] > 0:
                            self.needs_update[key] = old_issue[
                                'latestCommentAuthor']
                # else it's been updated
                else:
                    self.updated_tickets[key] = {}
                    if old_issue['commentCount'] > 0:
                        self.updated_tickets[key] = old_issue[
                            'latestCommentAuthor']
                    old_issue['commentCount'] = self.new_tickets[key][
                        'commentCount']
                    old_issue['timestamp'] = self.new_tickets[key]['timestamp']
                to_delete_from_new.append(key)
            # else it's closed
            else:
                self.closed_tickets[key] = {}
                self.closed_tickets[key] = old_issue['latestCommentAuthor']
                to_delete_from_old.append(key)
        for key in to_delete_from_old:
            del (self.old_tickets[key])
        for key in to_delete_from_new:
            del (self.new_tickets[key])
        # at this point the only tickets left in new_tickets are newly
        # created -> add them to old_tickets and recently updated
        for key in self.new_tickets.keys():
            ticket = self.new_tickets[key]
            self.old_tickets[key] = {}
            self.old_tickets[key]['commentCount'] = ticket['commentCount']
            self.old_tickets[key]['timestamp'] = ticket['timestamp']
            self.old_tickets[key]['latestCommentAuthor'] = ticket[
                'latestCommentAuthor']
            self.updated_tickets[key] = {}
            self.updated_tickets[key] = ticket['latestCommentAuthor']

    def test(self):
        """Manual smoke test that fakes the three ticket transitions.

        NOTE(review): relies on a module-level `jt` instance and a ticket
        literally keyed 'ticket' -- confirm before using.
        """
        jt.old_tickets['ticket']['commentCount'] = 5  # create updated ticket
        jt.old_tickets['ticket'][
            'timestamp'] -= 8 * 24 * 60 * 60  # create needs update ticket
        del (jt.new_tickets['ticket'])  # create closed ticket

    def print_all_lists(self):
        """Dump every bucket's keys to stdout.

        FIX: converted Python-2 `print x` statements to `print(x)` calls;
        all are single-argument, so output is unchanged and the class is now
        valid on Python 3 as well.
        """
        print("new tickets")
        for key in self.new_tickets.keys():
            print(key)
        print("\nold tickets")
        for key in self.old_tickets.keys():
            print(key)
        print("\nupdated tickets")
        for key in self.updated_tickets.keys():
            print(key)
        print("\nclosed tickets")
        for key in self.closed_tickets.keys():
            print(key)
        # print "\nneeds update tickets"
        # for key in self.needs_update.keys():
        #     print key
        print("\n")
class JiraBackend(BaseBackend):
    """Backend that collects issue data via the JIRA REST API."""

    @property
    def identifier(self):
        """Identifier of the backend (jira)."""
        return 'jiraOld'

    def __init__(self, cfg, issue_system_id, project_id):
        """
        Initialization.

        Initializes the people cache (see :func:`_get_people`) and the attribute
        mapping that translates JIRA API field names to our database design.

        :param cfg: holds all configuration. Object of class :class:`~issueshark.config.Config`
        :param issue_system_id: id of the issue system for which data should be collected
            (:class:`bson.objectid.ObjectId`)
        :param project_id: id of the project to which the issue system belongs
            (:class:`bson.objectid.ObjectId`)
        """
        super().__init__(cfg, issue_system_id, project_id)
        logger.setLevel(self.debug_level)
        # username -> People ObjectId cache, saves API requests (see _get_people)
        self.people = {}
        self.jira_client = None
        # Maps JIRA REST field names to our mongo issue model attribute names
        self.at_mapping = {
            'summary': 'title',
            'description': 'desc',
            'created': 'created_at',
            'updated': 'updated_at',
            'creator': 'creator_id',
            'reporter': 'reporter_id',
            'issuetype': 'issue_type',
            'priority': 'priority',
            'status': 'status',
            'versions': 'affects_versions',
            'components': 'components',
            'labels': 'labels',
            'resolution': 'resolution',
            'fixVersions': 'fix_versions',
            'assignee': 'assignee_id',
            'issuelinks': 'issue_links',
            'parent': 'parent_issue_id',
            'timeoriginalestimate': 'original_time_estimate',
            'environment': 'environment'
        }

    def process(self):
        """
        Processes the data from the JIRA API.

        1. Connects to JIRA
        2. Gets the last stored issue's updated_at field
        3. Collects issues that were changed since this date
        4. Calls :func:`_process_issue` for every found issue
        """
        logger.info("Starting the collection process...")
        if self.config.use_token():
            raise JiraException(
                'Jira does not support tokens! Use issue_user and issue_password instead'
            )

        # We need to get the name of the project (how it is called in jira, e.g. 'ZOOKEEPER')
        project_name = self.config.tracking_url.split('=')[-1]
        parsed_url = urlparse(self.config.tracking_url)

        # We need to add the original server (e.g. https://issues.apache.org)
        options = {
            'server': parsed_url.scheme + "://" + parsed_url.netloc,
        }

        # If the path does not start with /rest, meaning there is something in between
        # (e.g. "/jira"), we need to add that to the server.
        # TODO only works for one path part
        if not parsed_url.path.startswith('/rest'):
            options['server'] = options['server'] + '/' + parsed_url.path.split('/')[1]

        # Connect to jira
        self.jira_client = JIRA(options,
                                basic_auth=(self.config.issue_user,
                                            self.config.issue_password),
                                proxies=self.config.get_proxy_dictionary())

        # Get last modification date (since then, we will collect bugs)
        last_issue = Issue.objects(
            issue_system_id=self.issue_system_id).order_by('-updated_at').only(
                'updated_at').first()
        if last_issue is not None:
            starting_date = last_issue.updated_at
            query = "project=%s and updatedDate > '%s' ORDER BY createdDate ASC" % (
                project_name, starting_date.strftime('%Y/%m/%d %H:%M'))
        else:
            query = "project=%s ORDER BY createdDate ASC" % project_name

        # We search our initial set of issues
        issues = self.jira_client.search_issues(query,
                                                startAt=0,
                                                maxResults=50,
                                                fields='summary')
        logger.debug(
            'Found %d issues via url %s' %
            (len(issues),
             self.jira_client._get_url(
                 'search?jql=%s&startAt=0&maxResults=50' % quote_plus(query))))

        # If no new bugs found, return
        if len(issues) == 0:
            logger.info('No new issues found. Exiting...')
            sys.exit(0)

        # Otherwise, page through all issues, 50 at a time
        processed_results = 50
        while len(issues) > 0:
            logger.info("Processing %d issues..." % len(issues))
            for issue in issues:
                self._process_issue(issue.key)

            # Go through the next issues
            issues = self.jira_client.search_issues(query,
                                                    startAt=processed_results,
                                                    maxResults=50,
                                                    fields='summary')
            logger.debug('Found %d issues via url %s' %
                         (len(issues),
                          self.jira_client._get_url(
                              'search?jql=%s&startAt=%d&maxResults=50' %
                              (quote_plus(query), processed_results))))
            processed_results += 50

    def _transform_jira_issue(self, jira_issue):
        """
        Transforms the Jira issue to our issue model and saves it.

        :param jira_issue: original jira issue, like we got it from the Jira API
        """
        try:
            # We can not return here, as the issue might be updated. This means, that the title
            # could be updated as well as comments and new events
            mongo_issue = Issue.objects(issue_system_id=self.issue_system_id,
                                        external_id=jira_issue.key).get()
        except DoesNotExist:
            mongo_issue = Issue(
                issue_system_id=self.issue_system_id,
                external_id=jira_issue.key,
            )

        for at_name_jira, at_name_mongo in self.at_mapping.items():
            # If the attribute is in the rest response set it
            if hasattr(jira_issue.fields, at_name_jira):
                if isinstance(getattr(mongo_issue, at_name_mongo), list):
                    # Get the result and the current value and merge it together
                    result = self._parse_jira_field(jira_issue.fields, at_name_jira)
                    current_value = getattr(mongo_issue, at_name_mongo, list())
                    if not isinstance(result, list):
                        result = [result]

                    # Extend
                    current_value.extend(result)
                    if len(current_value) > 0 and at_name_mongo == 'issue_links':
                        # issue_links holds dicts (unhashable): deduplicate via item tuples
                        current_value = [
                            dict(t) for t in set(
                                [tuple(d.items()) for d in current_value])
                        ]
                    else:
                        current_value = list(set(current_value))

                    # Set the attribute
                    setattr(mongo_issue, at_name_mongo,
                            copy.deepcopy(current_value))
                else:
                    setattr(
                        mongo_issue, at_name_mongo,
                        self._parse_jira_field(jira_issue.fields, at_name_jira))
        return mongo_issue.save()

    def _parse_jira_field(self, jira_issue_fields, at_name_jira):
        """
        Parses a jira field from the original issue by dispatching to the
        type-specific parse function.

        :param jira_issue_fields: fields of the original jira issue
        :param at_name_jira: attribute name that should be returned
        """
        field_mapping = {
            'summary': self._parse_string_field,
            'description': self._parse_string_field,
            'created': self._parse_date_field,
            'updated': self._parse_date_field,
            'creator': self._parse_author_details,
            'reporter': self._parse_author_details,
            'issuetype': self._parse_string_field,
            'priority': self._parse_string_field,
            'status': self._parse_string_field,
            'versions': self._parse_array_field,
            'components': self._parse_array_field,
            'labels': self._parse_array_field,
            'resolution': self._parse_string_field,
            'fixVersions': self._parse_array_field,
            'assignee': self._parse_author_details,
            'issuelinks': self._parse_issue_links,
            'parent': self._parse_parent_issue,
            'timeoriginalestimate': self._parse_string_field,
            'environment': self._parse_string_field
        }

        correct_function = field_mapping.get(at_name_jira)
        return correct_function(jira_issue_fields, at_name_jira)

    def _parse_string_field(self, jira_issue_fields, at_name_jira):
        """
        Parses a string jira field from the original issue.

        :param jira_issue_fields: fields of the original jira issue
        :param at_name_jira: attribute name that should be returned
        """
        attribute = getattr(jira_issue_fields, at_name_jira)
        # Some "string" fields are objects (e.g. priority) whose name is the value
        if hasattr(attribute, 'name'):
            return getattr(attribute, 'name')
        else:
            return attribute

    def _parse_date_field(self, jira_issue_fields, at_name_jira):
        """
        Parses a date jira field from the original issue.

        :param jira_issue_fields: fields of the original jira issue
        :param at_name_jira: attribute name that should be returned
        """
        return dateutil.parser.parse(getattr(jira_issue_fields, at_name_jira))

    def _parse_parent_issue(self, jira_issue_fields, at_name_jira):
        """
        Parses the parent issue field from the original issue.

        :param jira_issue_fields: fields of the original jira issue
        :param at_name_jira: attribute name that should be returned
        """
        return self._get_issue_id_by_system_id(jira_issue_fields.parent.key)

    def _parse_author_details(self, jira_issue_fields, at_name_jira):
        """
        Parses an author detail field (creator/reporter/assignee) from the original issue.

        :param jira_issue_fields: fields of the original jira issue
        :param at_name_jira: attribute name that should be returned
        """
        people = getattr(jira_issue_fields, at_name_jira)
        if people is not None:
            return self._get_people(people.name, people.emailAddress,
                                    people.displayName)
        return None

    def _parse_array_field(self, jira_issue_fields, at_name_jira):
        """
        Parses an array field from the original issue.

        :param jira_issue_fields: fields of the original jira issue
        :param at_name_jira: attribute name that should be returned
        """
        array_field = getattr(jira_issue_fields, at_name_jira)
        new_array = []
        for value in array_field:
            if hasattr(value, 'name'):
                new_array.append(getattr(value, 'name'))
            else:
                new_array.append(value)
        return new_array

    def _parse_issue_links(self, jira_issue_fields, at_name_jira):
        """
        Parses the issue links field from the original issue.

        :param jira_issue_fields: fields of the original jira issue
        :param at_name_jira: attribute name that should be returned
        """
        links = []
        for issue_link in getattr(jira_issue_fields, at_name_jira):
            if hasattr(issue_link, 'outwardIssue'):
                issue_id = self._get_issue_id_by_system_id(
                    issue_link.outwardIssue.key)
                issue_type, issue_effect = self._get_issue_link_type_and_effect(
                    issue_link.type.outward)
            else:
                issue_id = self._get_issue_id_by_system_id(
                    issue_link.inwardIssue.key)
                issue_type, issue_effect = self._get_issue_link_type_and_effect(
                    issue_link.type.inward)
            links.append({
                'issue_id': issue_id,
                'type': issue_type,
                'effect': issue_effect
            })
        return links

    def _process_issue(self, issue_key):
        """
        Processes the issue.

        1. Transformation of the jira issue into a mongo db issue (can directly be saved). \
        See: :func:`_transform_jira_issue`
        2. Go through the whole history of the jira issue, create events and set back the values. \
        See: :func:`_process_event`
        3. This way, we get the ORIGINAL issue that was posted in jira, which is then saved in the \
        issue collection --> Some things can not be turned back, e.g. issue links, as there is \
        information missing in the changelog
        4. Comments of the issue are processed (and stored). See: :func:`_process_comments`

        :param issue_key: key of the issue (e.g. ZOOKEEPER-2124)
        """
        issue_not_retrieved = True
        timeout_start = time.time()
        timeout = 300  # 5 minutes

        # Retrieve the issue via the client and retry as long as the timeout is not running out
        issue = None
        while issue_not_retrieved and time.time() < timeout_start + timeout:
            try:
                issue = self.jira_client.issue(issue_key, expand='changelog')
                issue_not_retrieved = False
            except JIRAError:
                time.sleep(30)

        # BUGFIX: previously this checked the elapsed time instead of the retrieval
        # flag, so an issue fetched right at the deadline was wrongly discarded.
        if issue_not_retrieved:
            logger.error('Could not get issue: %s' % issue_key)
            return

        logger.debug(
            'Processing issue %s via url %s' %
            (issue,
             self.jira_client._get_url('issue/%s?expand=changelog' % issue)))
        logger.debug('Got fields: %s' % vars(issue.fields))

        # Transform jira issue to mongo issue
        mongo_issue = self._transform_jira_issue(issue)
        logger.debug('Transformed issue: %s' % mongo_issue)

        # Go through all events and set back issue items till we get the original one
        events = []
        for history in reversed(issue.changelog.histories):
            i = 0
            created_at = dateutil.parser.parse(history.created)

            # It can happen that an event does not have an author (e.g., ZOOKEEPER-2218)
            author_id = None
            if hasattr(history, 'author'):
                author_id = self._get_people(history.author.name,
                                             name=history.author.displayName,
                                             email=history.author.emailAddress)

            for jira_event in reversed(history.items):
                unique_event_id = str(history.id) + "%%" + str(i)
                logger.debug('Processing changelog entry: %s' % vars(jira_event))

                # Create event list
                event, newly_created = self._process_event(
                    created_at, author_id, jira_event, unique_event_id,
                    mongo_issue)
                logger.debug('Newly created?: %s, Resulting event: %s' %
                             (newly_created, event))

                # Append to list if event is not stored in db
                if newly_created:
                    events.append(event)
                i += 1

        logger.debug('Original issue to store: %s' % mongo_issue)

        # We need to set the status to open here, as this is the first status for every issue
        mongo_issue.status = 'Open'

        # Update issue
        mongo_issue.save()

        # Set issue_id for event list and bulk write
        if events:
            Event.objects.insert(events, load_bulk=False)

        # Store comments of issue
        self._process_comments(issue, mongo_issue.id)

    def _process_comments(self, issue, issue_id):
        """
        Processes the comments from a jira issue and bulk-inserts the new ones.

        :param issue: original jira issue
        :param issue_id: :class:`bson.objectid.ObjectId` of the document that holds \
            the issue information
        """
        # Go through all comments of the issue
        comments_to_insert = []
        logger.info('Processing %d comments...' %
                    len(issue.fields.comment.comments))
        for comment in issue.fields.comment.comments:
            logger.debug('Processing comment: %s' % comment)
            created_at = dateutil.parser.parse(comment.created)
            try:
                mongo_comment = IssueComment.objects(external_id=comment.id,
                                                     issue_id=issue_id).get()
                logger.debug('Comment already in database, id: %s' %
                             mongo_comment.id)
                continue
            except DoesNotExist:
                mongo_comment = IssueComment(
                    external_id=comment.id,
                    issue_id=issue_id,
                    created_at=created_at,
                    author_id=self._get_people(comment.author.name,
                                               comment.author.emailAddress,
                                               comment.author.displayName),
                    comment=comment.body,
                )
                logger.debug('Resulting comment: %s' % mongo_comment)
                comments_to_insert.append(mongo_comment)

        # If comments need to be inserted -> bulk insert
        if comments_to_insert:
            IssueComment.objects.insert(comments_to_insert, load_bulk=False)

    def _get_issue_id_by_system_id(self, system_id, refresh_key=False):
        """
        Gets the issue id like it is stored in the mongodb for a system id (the id
        that was assigned by jira to the issue).

        :param system_id: id of the issue like it was assigned by jira
        :param refresh_key: if set to true, jira is contacted to get the newest system id
        """
        if refresh_key:
            system_id = self._get_newest_key_for_issue(system_id)

        try:
            issue_id = Issue.objects(issue_system_id=self.issue_system_id,
                                     external_id=system_id).only('id').get().id
        except DoesNotExist:
            # Create a stub issue so the link can be resolved later
            issue_id = Issue(issue_system_id=self.issue_system_id,
                             external_id=system_id).save().id
        return issue_id

    def _set_back_mongo_issue(self, mongo_issue, mongo_at_name, jira_event):
        """
        Sets back the mongo issue for this jira event by dispatching to the
        field-specific set-back function.

        :param mongo_issue: issue like it is stored in the mongodb
        :param mongo_at_name: attribute name of the field of the issue that is affected by the event
        :param jira_event: original event that was acquired by the jira api
        """
        function_mapping = {
            'title': self._set_back_string_field,
            'desc': self._set_back_string_field,
            'issue_type': self._set_back_string_field,
            'priority': self._set_back_string_field,
            'status': self._set_back_string_field,
            'affects_versions': self._set_back_array_field,
            'components': self._set_back_array_field,
            'labels': self._set_back_labels,
            'resolution': self._set_back_string_field,
            'fix_versions': self._set_back_array_field,
            'assignee_id': self._set_back_assignee,
            'issue_links': self._set_back_issue_links,
            'parent_issue_id': self._set_back_parent_id,
            'original_time_estimate': self._set_back_string_field,
            'environment': self._set_back_string_field,
        }

        correct_function = function_mapping[mongo_at_name]
        correct_function(mongo_issue, mongo_at_name, jira_event)

    def _set_back_labels(self, mongo_issue, mongo_at_name, jira_event):
        """
        Set back the labels array for the event. Somehow the labels are handled
        differently than, e.g., components: different labels are split by a space.

        :param mongo_issue: issue like it is stored in the mongodb
        :param mongo_at_name: attribute name of the field of the issue that is affected by the event
        :param jira_event: original event that was acquired by the jira api
        """
        old_value = getattr(jira_event, 'fromString')
        new_value = getattr(jira_event, 'toString')

        item_list = getattr(mongo_issue, mongo_at_name)

        # Re-add what the event removed, remove what the event added
        if old_value:
            for item in old_value.split(" "):
                item_list.append(item)

        if new_value:
            for item in new_value.split(" "):
                item_list.remove(item)

        setattr(mongo_issue, mongo_at_name, item_list)

    def _set_back_string_field(self, mongo_issue, mongo_at_name, jira_event):
        """
        Set back a string field for the event.

        :param mongo_issue: issue like it is stored in the mongodb
        :param mongo_at_name: attribute name of the field of the issue that is affected by the event
        :param jira_event: original event that was acquired by the jira api
        """
        setattr(mongo_issue, mongo_at_name, getattr(jira_event, 'fromString'))

    def _set_back_array_field(self, mongo_issue, mongo_at_name, jira_event):
        """
        Set back an array field for the event.

        :param mongo_issue: issue like it is stored in the mongodb
        :param mongo_at_name: attribute name of the field of the issue that is affected by the event
        :param jira_event: original event that was acquired by the jira api
        """
        old_value = getattr(jira_event, 'fromString')
        new_value = getattr(jira_event, 'toString')

        item_list = getattr(mongo_issue, mongo_at_name)

        if old_value:
            item_list.append(old_value)

        if new_value:
            item_list.remove(new_value)

        setattr(mongo_issue, mongo_at_name, item_list)

    def _set_back_assignee(self, mongo_issue, mongo_at_name, jira_event):
        """
        Set back the assignee field for the event.

        :param mongo_issue: issue like it is stored in the mongodb
        :param mongo_at_name: attribute name of the field of the issue that is affected by the event
        :param jira_event: original event that was acquired by the jira api
        """
        old_assignee = getattr(jira_event, 'from')

        if old_assignee is not None:
            setattr(mongo_issue, mongo_at_name, self._get_people(old_assignee))
        else:
            setattr(mongo_issue, mongo_at_name, None)

    def _set_back_parent_id(self, mongo_issue, mongo_at_name, jira_event):
        """
        Set back the parent id field for the event.

        :param mongo_issue: issue like it is stored in the mongodb
        :param mongo_at_name: attribute name of the field of the issue that is affected by the event
        :param jira_event: original event that was acquired by the jira api
        """
        old_parent_id = getattr(jira_event, 'from')

        if old_parent_id is not None:
            setattr(
                mongo_issue, mongo_at_name,
                self._get_issue_id_by_system_id(old_parent_id,
                                                refresh_key=True))
        else:
            setattr(mongo_issue, mongo_at_name, None)

    def _set_back_issue_links(self, mongo_issue, mongo_at_name, jira_event):
        """
        Set back the issue links field for the event.

        :param mongo_issue: issue like it is stored in the mongodb
        :param mongo_at_name: attribute name of the field of the issue that is affected by the event
        :param jira_event: original event that was acquired by the jira api
        """
        item_list = getattr(mongo_issue, mongo_at_name)

        # Everything that is added in this event must be removed
        if getattr(jira_event, 'to'):
            issue_id = self._get_issue_id_by_system_id(getattr(
                jira_event, 'to'), refresh_key=True)
            link_type, link_effect = self._get_issue_link_type_and_effect(
                getattr(jira_event, 'toString'))

            found_index = 0
            for stored_issue in item_list:
                if stored_issue['issue_id'] == issue_id and stored_issue['effect'].lower() == link_effect.lower() and \
                        stored_issue['type'].lower() == link_type.lower():
                    break
                found_index += 1

            try:
                del item_list[found_index]
            except IndexError:
                logger.warning(
                    'Could not find issue link %s to issue %s to delete in issue %s'
                    % (getattr(jira_event, 'toString'),
                       getattr(jira_event, 'to'), mongo_issue))

        # Everything that was before, must be added
        if getattr(jira_event, 'from'):
            issue_id = self._get_issue_id_by_system_id(getattr(
                jira_event, 'from'), refresh_key=True)
            link_type, link_effect = self._get_issue_link_type_and_effect(
                getattr(jira_event, 'fromString'))

            already_in_list = False
            for stored_issue in item_list:
                if stored_issue['issue_id'] == issue_id and stored_issue['effect'].lower() == link_effect.lower() \
                        and stored_issue['type'].lower() == link_type.lower():
                    already_in_list = True

            if not already_in_list:
                item_list.append({
                    'issue_id': issue_id,
                    'type': link_type,
                    'effect': link_effect
                })

        setattr(mongo_issue, mongo_at_name, item_list)

    def _get_issue_link_type_and_effect(self, msg_string):
        """
        Gets the correct issue link type and effect from a message.

        :param msg_string: String from which type and effect should be acquired
        """
        if "Blocked" in msg_string:
            return "Blocked", "Blocked"
        elif "is blocked by" in msg_string:
            return "Blocker", "is blocked by"
        elif "blocks" in msg_string:
            return "Blocker", "blocks"
        elif "is cloned by" in msg_string:
            return "Cloners", "is cloned by"
        elif "is a clone of" in msg_string or "is cloned as" in msg_string:
            return "Cloners", "is cloned by"
        elif "Is contained by" in msg_string or "is contained by" in msg_string:
            return "Container", "is contained by"
        elif "contains" in msg_string:
            return "Container", "contains"
        elif "Dependent" in msg_string:
            return "Dependent", "Dependent"
        elif "is duplicated by" in msg_string:
            return "Duplicate", "is duplicated by"
        elif "duplicates" in msg_string:
            return "Duplicate", "duplicates"
        elif "is part of" in msg_string:
            return "Incorporates", "is part of"
        elif "incorporates" in msg_string:
            return "Incorporates", "incorporates"
        elif "is related to" in msg_string:
            return "Reference", "is related to"
        elif "relates" in msg_string:
            return "Reference", "relates to"
        elif "is broken by" in msg_string:
            return "Regression", "is broken by"
        elif "breaks" in msg_string:
            return "Regression", "breaks"
        elif "is required by" in msg_string:
            return "Required", "is required by"
        elif "requires" in msg_string:
            return "Required", "requires"
        elif "is superceded by" in msg_string:
            return "Supercedes", "is superceded by"
        elif "supercedes" in msg_string:
            return "Supercedes", "supercedes"
        elif "is depended upon by" in msg_string:
            return "Dependent", "is depended upon by"
        elif "depends upon" in msg_string:
            return "Dependent", "depends upon"
        elif "depends on" in msg_string:
            return "Dependent", "depends on"
        else:
            logger.warning(
                "Could not find issue type and effect of string %s" %
                msg_string)
            return None, None

    def _process_event(self, created_at, author_id, jira_event,
                       unique_event_id, mongo_issue):
        """
        Processes a jira event for an issue. Goes through the event and sets back
        the mongo issue accordingly.

        :param created_at: date when the issue was created
        :param author_id: id of the author, who created this issue
        :param jira_event: original jira event, like it was acquired from the REST API
        :param unique_event_id: unique id to identify the event
        :param mongo_issue: issue that conforms to our issue model
        :return: tuple of (event, newly_created flag)
        """
        # Maps the field names used in the changelog back to the REST field names
        terminology_mapping = {
            'Component': 'components',
            'Link': 'issuelinks',
            'Fix Version': 'fixVersions',
            'Version': 'versions',
            'Labels': 'labels',
            'Parent': 'parent'
        }

        is_new_event = True
        try:
            mongo_event = Event.objects(external_id=unique_event_id,
                                        issue_id=mongo_issue.id).get()
            is_new_event = False
        except DoesNotExist:
            mongo_event = Event(external_id=unique_event_id,
                                issue_id=mongo_issue.id,
                                created_at=created_at,
                                author_id=author_id)

        # We need to map back the jira terminology from getting the issues to the
        # terminology in the histories
        try:
            jira_at_name = terminology_mapping[getattr(jira_event, 'field')]
        except KeyError:
            jira_at_name = getattr(jira_event, 'field')

        # Map jira terminology to our terminology
        try:
            mongo_event.status = self.at_mapping[jira_at_name]
        except KeyError:
            logger.warning('Mapping for attribute %s not found.' %
                           jira_at_name)
            mongo_event.status = jira_at_name

        # Check if the mongo_issue has the attribute.
        # If yes: We can use the mongo_issue to set the old and new value of the event
        # If no: We use the added / removed fields
        if hasattr(mongo_issue, mongo_event.status):
            mongo_event.new_value = copy.deepcopy(
                getattr(mongo_issue, mongo_event.status))
            self._set_back_mongo_issue(mongo_issue, mongo_event.status,
                                       jira_event)
            mongo_event.old_value = copy.deepcopy(
                getattr(mongo_issue, mongo_event.status))
        else:
            mongo_event.new_value = getattr(jira_event, 'toString')
            mongo_event.old_value = getattr(jira_event, 'fromString')

        return mongo_event, is_new_event

    def _get_newest_key_for_issue(self, old_key):
        """
        Gets the newest key for an issue. We query the saved issue and access it via
        our jira connection. The jira connection will give us back the NEW value
        (e.g., if we access via the key ZOOKEEPER-659, we will get back
        BOOKKEEPER-691 which is the new value).

        :param old_key: old issue key
        :return: the current key (or *old_key* if unchanged or not retrievable)
        """
        try:
            issue = self.jira_client.issue(old_key, fields='summary')
            if old_key != issue.key:
                logger.debug('Got new issue: %s' % issue)
                return issue.key
        except JIRAError:
            # Can happen as issue may be deleted
            return old_key
        # BUGFIX: previously fell off the end and implicitly returned None
        # when the key had not changed.
        return old_key

    def _get_people(self, username, email=None, name=None):
        """
        Gets the document id from the people collection. First checks the people
        dictionary to save API requests.

        :param username: username of the person
        :param email: email of the person
        :param name: name of the person
        """
        # Check if user was accessed before. This reduces the amount of API requests
        if username in self.people:
            return self.people[username]

        # If email and name are not set, make a request to get the user
        if email is None and name is None:
            user = self._get_user(username)
            email = user.emailAddress
            name = user.displayName

        # Replace the email address "anonymization"
        email = email.replace(' at ', '@').replace(' dot ', '.')
        people_id = People.objects(name=name, email=email).upsert_one(
            name=name, email=email, username=username).id
        self.people[username] = people_id
        return people_id

    def _get_user(self, username):
        """
        Gets the user via the jira client.

        :param username: username of the jira user
        """
        # Get user via the jira client
        if username is None:
            return None
        return self.jira_client.find('user?username={0}', username)
#!/usr/bin/env python
"""Smoke-test script: fetch a single JIRA issue and print a few of its fields."""
from jira import JIRA
import json
import os

# Credentials come from the environment; a KeyError here means they are unset.
user = os.environ['JIRA_USER']
password = os.environ['JIRA_PASSWORD']

jira = JIRA('https://boundary.jira.com', basic_auth=(user, password))

issue = jira.issue('PLUG-8')
print(type(issue))
#print(json.dumps(issue.fields))

# Dump a handful of fields to confirm the connection works.
for value in (issue.fields.project.key,
              issue.fields.issuetype.name,
              issue.fields.reporter.displayName):
    print(value)
# NOTE(review): the next statement is corrupted residue — the credential prompt
# and a jira.create_issue(...) call were fused together, and the password
# handling was redacted ("******"). Preserved verbatim; needs manual repair
# before this file can run.
user = input("Username: "******"Test story title from JIRA-Python automation script", description="Test story description from JIRA Python automation script", issuetype={'name': 'Story'}, priority={'name': 'High'})
# NOTE(review): `new_issue` is never assigned in the visible code — presumably
# the redacted create_issue call bound it; verify against the original script.
print('\nNew issue created: ', new_issue)

######## Updating an Issue ######
# Get an issue.
issue = jira.issue("JRA-1330")
# Change the issue's summary and description.
issue.update(summary="I'm different!", description="Changed the summary to be different.")
# Change the issue without sending updates
issue.update(notify=False, description="Quiet summary update.")
# You can update the entire labels field like this
issue.update(fields={"labels": ["AAA", "BBB"]})
# Or modify the List of existing labels. The new label is unicode with no
# spaces
issue.fields.labels.append(u"new_text")
issue.update(fields={"labels": issue.fields.labels})
def reopened_task(branch):
    """Transition the issue named by *branch* back to 'Reopened' and leave a note.

    Relies on module-level ``options``, ``JIRA_USERNAME`` and ``PASSWORD``
    for the connection.
    """
    client = JIRA(options, basic_auth=(JIRA_USERNAME, PASSWORD))
    ticket = client.issue(branch)
    client.transition_issue(ticket, u'Reopened')
    client.add_comment(branch, 'Autotest fail')
print("\n") jql = config.read_config_key('ReleasesJQL') jql = jql.replace('{{from}}', "'" + from_date + "'") jql = jql.replace('{{to}}', "'" + to_date + "'") print("\nUsing the following JQL to get release issues\n", jql) print("Searching for releases...") release_issues = jira.search_issues(jql, maxResults=maxIssuesToGet) releases = 0 found_in_rel = False project_codes_tuple = tuple(config.read_config_key('Projects', ())) print("Scanning found releases for your team/projects tickets as linked issues...") for issueId in release_issues: rel = jira.issue(issueId) if (rel.fields.issuelinks): for link in rel.fields.issuelinks: if hasattr(link, "outwardIssue"): linkedIssue = link.outwardIssue elif hasattr(link, "inwardIssue"): linkedIssue = link.inwardIssue else: linkedIssue = None if linkedIssue is not None: try: linked_issue_key = jira.issue(linkedIssue).key if linked_issue_key.startswith(project_codes_tuple): releases += 1 found_in_rel = True
class Jira:
    """Thin wrapper around the jira client with friendlier error messages."""

    # {{{ Constants
    BLOCKS = 'Blocks'
    DUPLICATE = 'Duplicate'
    RELATES = 'Relates'
    # }}}

    def __init__(self, address='https://r3-cev.atlassian.net'):
        """Initialise JIRA class, pointing it to the JIRA endpoint.

        :param address: base URL of the JIRA server
        """
        self.address = address
        self.jira = None
        self.mock_key = 1
        self.custom_fields_by_name, self.custom_fields_by_key = {}, {}

    def login(self, user, password):
        """Log in as a specific JIRA user and cache the custom-field mappings.

        :return: self, for chaining
        :raises Exception: with a readable message when login fails
        """
        try:
            self.jira = JIRA(self.address, basic_auth=(user, password))
            for x in self.jira.fields():
                if x['custom']:
                    self.custom_fields_by_name[x['name']] = x['key']
                    self.custom_fields_by_key[x['key']] = x['name']
            return self
        except Exception as error:
            # BUGFIX: Exception has no .message attribute on Python 3; the old
            # `error.message` raised AttributeError inside this handler and
            # masked the real login failure.
            message = getattr(error, 'message', str(error))
            if isinstance(error, JIRAError):
                message = error.text if error.text and len(
                    error.text) > 0 and not error.text.startswith(
                        '<!') else message
            raise Exception('failed to log in to JIRA{}{}'.format(
                ': ' if message else '', message))

    def search(self, query, *args):
        """Search for issues, manually traversing pages when more than one is returned.

        :param query: JQL query, optionally with ``{}`` placeholders filled from *args*
        :return: generator of :class:`Issue` wrappers
        """
        max_count = 50
        index, offset, count = 0, 0, max_count
        query = query.format(*args) if len(args) > 0 else query
        while count == max_count:
            try:
                issues = self.jira.search_issues(query,
                                                 maxResults=max_count,
                                                 startAt=offset)
                count = len(issues)
                offset += count
                for issue in issues:
                    index += 1
                    yield Issue(self, index=index, issue=issue)
            except JIRAError as error:
                raise Exception('failed to run query "{}": {}'.format(
                    query, error.text))

    def find(self, key):
        """Look up a single issue by key."""
        try:
            issue = self.jira.issue(key)
            return Issue(self, issue=issue)
        except JIRAError as error:
            raise Exception('failed to look up issue "{}": {}'.format(
                key, error.text))

    def create(self, fields, dry_run=False):
        """Create a new issue (or a mock one when *dry_run* is set)."""
        if dry_run:
            return Issue(self, fields=fields)
        try:
            # BUGFIX: on Python 3, filter() is lazy — the raw filter object was
            # handed to the JIRA API instead of a list of labels. list() is a
            # no-op change on Python 2.
            fields['labels'] = list(
                filter(lambda x: x is not None, fields['labels']))
            issue = self.jira.create_issue(fields)
            return Issue(self, issue=issue)
        except JIRAError as error:
            raise Exception('failed to create issue: {}'.format(error.text))

    def link(self, issue_key, other_issue_key, relationship=RELATES,
             dry_run=False):
        """Link one issue to another with the given relationship."""
        if dry_run:
            return
        try:
            self.jira.create_issue_link(type=relationship,
                                        inwardIssue=issue_key,
                                        outwardIssue=other_issue_key,
                                        comment={
                                            'body': 'Linked {} to {}'.format(
                                                issue_key, other_issue_key),
                                        })
        except JIRAError as error:
            raise Exception('failed to link {} and {}: {}'.format(
                issue_key, other_issue_key, error.text))
def delete_issue(request, find):
    """Delete the JIRA ticket linked to the given finding.

    NOTE(review): connection settings are read off the ``Tool_Type`` class
    itself (``Tool_Type.url`` etc.), not an instance — confirm this is intended.
    """
    linked = JIRA_Issue.objects.get(finding=find)
    client = JIRA(server=Tool_Type.url,
                  basic_auth=(Tool_Type.username, Tool_Type.password))
    client.issue(linked.jira_id).delete()
def _get_split_issue(jira_server: JIRA, issue: Issue, new_project: Project) -> Optional[Issue]:
    """Return the outward issue of the first 'split' link whose target was filed
    by ``security.automation`` in *new_project*; ``None`` when no link qualifies.

    Uses the module-level ``issue_split_id`` to recognise split links.
    """
    for link in issue.fields.issuelinks:
        if link.type.id != issue_split_id:
            continue
        candidate = getattr(link, 'outwardIssue', None)
        if not candidate:
            continue
        candidate_fields = jira_server.issue(candidate.id).fields
        if candidate_fields.reporter.key == 'security.automation' \
                and candidate_fields.project.id == new_project.id:
            return candidate
continue text = row[1] keys = row[0].split(",") for key in keys: #key = row[i] jid = 'ENG-%s' % (cleanstring(key)) #print jid #Is it valid number? if not is_valid_jid(jid): results.append(('ERROR', jid, text, 'Not a valid issue #')) continue #Does it exist in Jira? try: issue = jira.issue(jid) except: results.append(('ERROR', jid, text, 'Issue does not exist')) continue #Get the release note and decide what to do with it existing_relnote = getattr(issue.fields, relnote_field) #Has a release note? if existing_relnote: #Are they the same? # Calculate score fuzzyscore = fuzz.ratio(cleanstring(existing_relnote), cleanstring(text)) if fuzzyscore == 100: ##release notes are the same. Nothing to do pass
def to_raw_issue(self, jira_server: JIRA):
    """Fetch this object's underlying raw jira issue from *jira_server* by key."""
    key = self.key
    return jira_server.issue(key)
# st.fields.status.name," -Type - ",st.fields.issuetype) if hasattr(st.fields, 'Labels') and st.fields.labels is not None: manualDf.loc[i, 'Labels'] = st.fields.labels # if hasattr(st.fields,'customfield_11000') and st.fields.customfield_11000: # manualDf.loc[i,'DevOwner'] = st.fields.customfield_11000.key return i+1 i = 0 for issue in allIssues: print("Analysing [", issue.key, "]") try: if hasattr(issue.fields, 'parent') and issue.fields.parent.key is not None: parentIssue = jira.issue(issue.fields.parent.key) if parentIssue and hasattr(parentIssue, 'fields'): manualDf.loc[i, 'ParentKey'] = parentIssue.key manualDf.loc[i, 'Status'] = parentIssue.fields.status.name # manualDf.loc[i,'Summary'] = parentIssue.fields.summary #Get Creation date IssueCreationDate = pd.Timestamp(parentIssue.fields.created) manualDf.loc[i, 'CreationDate'] = IssueCreationDate.date() # Get Severity if hasattr(parentIssue.fields, 'customfield_10125'): manualDf.loc[i, 'Severity'] = parentIssue.fields.customfield_10125.value # Get Regression attributes. if hasattr(parentIssue.fields, 'customfield_10101') and parentIssue.fields.customfield_10101:
def load_issue_via_api(issues, persons, url):
    """Enrich every issue dict in *issues* with its JIRA changelog history.

    For each issue the full changelog is fetched from the JIRA REST API and
    turned into a list of event dicts ("state_updated", "resolution_updated",
    "assigned", "add_link", "remove_link") that is stored under
    issue["history"].  Each comment of the issue additionally receives the
    state/resolution the issue had at the moment the comment was written.

    :param issues: list of issue dicts; each needs "externalId",
        "creationDate" and "comments" keys (mutated in place)
    :param persons: list of persons from JIRA (incl. e-mail addresses),
        used to resolve author/assignee users
    :param url: the project url
    """
    log.info("Load issue information via api...")
    jira_project = JIRA(url)
    for issue in issues:
        # Fetch the issue together with its complete changelog.
        api_issue = jira_project.issue(issue["externalId"], expand="changelog")
        changelog = api_issue.changelog
        histories = list()
        # adds the issue creation time with the default state to a list;
        # the list is needed to find out the state the issue had when a comment was written
        state_changes = [[issue["creationDate"], "open"]]
        # adds the issue creation time with the default resolution to a list;
        # the list is needed to find out the resolution the issue had when a comment was written
        resolution_changes = [[issue["creationDate"], "unresolved"]]
        # history changes get visited in time order from oldest to newest
        for change in changelog.histories:
            # default values for state and resolution
            old_state, new_state, old_resolution, new_resolution = "open", "open", "unresolved", "unresolved"
            # all changes in the issue changelog are checked if they contain a useful information
            for item in change.items:
                # state_updated event gets created and added to the issue history
                if item.field == "status":
                    if item.fromString is not None:
                        old_state = item.fromString.lower()
                    if item.toString is not None:
                        new_state = item.toString.lower()
                    history = dict()
                    history["event"] = "state_updated"
                    history["event_info_1"] = new_state
                    history["event_info_2"] = old_state
                    user = create_user(change.author.name, change.author.name, "")
                    history["author"] = merge_user_with_user_from_csv(user, persons)
                    history["date"] = format_time(change.created)
                    histories.append(history)
                    state_changes.append([history["date"], new_state])
                # resolution_updated event gets created and added to the issue history
                elif item.field == "resolution":
                    if item.fromString is not None:
                        old_resolution = item.fromString.lower()
                    if item.toString is not None:
                        new_resolution = item.toString.lower()
                    history = dict()
                    history["event"] = "resolution_updated"
                    history["event_info_1"] = new_resolution
                    history["event_info_2"] = old_resolution
                    user = create_user(change.author.name, change.author.name, "")
                    history["author"] = merge_user_with_user_from_csv(user, persons)
                    history["date"] = format_time(change.created)
                    histories.append(history)
                    resolution_changes.append([history["date"], new_resolution])
                # assigned event gets created and added to the issue history
                elif item.field == "assignee":
                    history = dict()
                    history["event"] = "assigned"
                    user = create_user(change.author.name, change.author.name, "")
                    history["author"] = merge_user_with_user_from_csv(user, persons)
                    assignee = create_user(item.toString, item.toString, "")
                    assigned_user = merge_user_with_user_from_csv(assignee, persons)
                    history["event_info_1"] = assigned_user["name"]
                    history["event_info_2"] = assigned_user["email"]
                    history["date"] = format_time(change.created)
                    histories.append(history)
                elif item.field == "Link":
                    # add_link event gets created and added to the issue history
                    if item.toString is not None:
                        history = dict()
                        history["event"] = "add_link"
                        user = create_user(change.author.name, change.author.name, "")
                        history["author"] = merge_user_with_user_from_csv(user, persons)
                        # api returns a text. The issue id is at the end of the text and gets extracted
                        history["event_info_1"] = item.toString.split()[-1]
                        history["event_info_2"] = "issue"
                        history["date"] = format_time(change.created)
                        histories.append(history)
                    # remove_link event gets created and added to the issue history
                    if item.fromString is not None:
                        history = dict()
                        history["event"] = "remove_link"
                        user = create_user(change.author.name, change.author.name, "")
                        history["author"] = merge_user_with_user_from_csv(user, persons)
                        # api returns a text. The issue id is at the end of the text and gets extracted
                        history["event_info_1"] = item.fromString.split()[-1]
                        history["event_info_2"] = "issue"
                        history["date"] = format_time(change.created)
                        histories.append(history)
        # state and resolution change lists get sorted by time
        state_changes.sort(key=lambda x: x[0])
        resolution_changes.sort(key=lambda x: x[0])
        for comment in issue["comments"]:
            # the state the issue had when the comment was written is searched out
            # (the last entry older than the comment wins; NOTE(review):
            # a comment dated exactly at creation time matches no entry and
            # gets no "state_on_creation" key — confirm downstream handling)
            for state in state_changes:
                if comment["changeDate"] > state[0]:
                    comment["state_on_creation"] = state[1]
            # the resolution the issue had when the comment was written is searched out
            # NOTE(review): wrapped in a one-element list here, while
            # state_on_creation is a bare string — presumably intentional
            # for the consumer; verify.
            for resolution in resolution_changes:
                if comment["changeDate"] > resolution[0]:
                    comment["resolution_on_creation"] = [str(resolution[1])]
        issue["history"] = histories
class JiraCI:
    """JIRA helper used from CI / git hooks.

    Legacy Python 2 module (note the ``print`` statement in
    check_issue_exist); expects ``version_info`` and ``stdout`` imported
    from ``sys`` and ``JIRA``/``JIRAError`` from the jira library.
    """

    # Human-readable resolution name -> JIRA resolution id.
    resolution_state = {"fixed": "1", "wont fixed": "2", "duplicate": "3",
                        "incomplete": "4", "cannot reproduce": "5",
                        "not a bug": "6", "done": "7"}

    def __init__(self, jira_url, login, password):
        """Open an authenticated connection to *jira_url*.

        Old jira library versions (minor version <= 6) took the server url
        directly instead of an options dict.
        """
        # NOTE(review): version_info[1] compares only the *minor* Python
        # version, so e.g. 3.6 would also take the legacy branch — confirm.
        if version_info[1] <= 6:
            options = jira_url
        else:
            options = {"server": jira_url}
        self.jira = JIRA(options, basic_auth=(login, password))

    @staticmethod
    def debug_jira(text):
        # Lightweight debug logging straight to stdout.
        stdout.write("[DEBUG JIRA]: {0}\n".format(text))

    def check_issue_exist(self, issue_id):
        """Return True if *issue_id* can be fetched from JIRA."""
        try:
            self.jira.issue(issue_id)
        except JIRAError as e:
            print "[-] : {0} - {1}".format(issue_id, e.text)
            return False
        else:
            return True

    def check_issue_state(self, issue_id, issue_state):
        """Return True if the issue's current status equals *issue_state*
        (case-insensitive)."""
        jira_issue = self.jira.issue(issue_id)
        if jira_issue.fields.status.name.lower() == issue_state.lower():
            return True
        else:
            return False

    def add_comment(self, issue_id, comment, formatting=False):
        """Add *comment* to the issue unless an identical comment exists.

        With formatting=True the comment is wrapped in Jira {code} markup.
        """
        jira_issue = self.jira.issue(issue_id)
        if formatting:
            comment = "{code}" + comment + "{code}"
        if not self.check_comment_exist(issue_id, comment):
            self.jira.add_comment(jira_issue, comment)
            self.debug_jira("Comment (for {0}) : {1} added".format(issue_id, comment.rstrip()))
        else:
            self.debug_jira("Comment (for {0}) : {1} already exist".format(issue_id, comment.rstrip()))

    def assign_issue(self, issue_id, assigned_user):
        """Assign the issue to *assigned_user*."""
        jira_issue = self.jira.issue(issue_id)
        jira_issue.update(assignee={"name": assigned_user})

    def add_link(self, issue_id, title, url):
        """Attach a remote link to the issue unless the same link exists."""
        url_object = {"url": url, "title": title}
        if not self.check_link_exist(issue_id, title, url):
            self.jira.add_remote_link(issue_id, url_object)
            self.debug_jira("Link (for {0}) : {1} added".format(issue_id, url))
        else:
            self.debug_jira("Link (for {0}) : {1} already exist".format(issue_id, url))

    def resolve_issue_to_reporter(self, issue_id):
        """Resolve the issue as "fixed" and hand it back to its reporter."""
        reporter = self.get_reporter_issue(issue_id)
        # NOTE(review): transition id "5" is presumably the "Resolve Issue"
        # transition on this instance — transition ids are instance-specific,
        # confirm.
        self.jira.transition_issue(issue_id, "5", resolution={"id": self.resolution_state["fixed"]})
        self.assign_issue(issue_id, reporter)

    def get_reporter_issue(self, issue_id):
        """Return the username of the issue's reporter."""
        jira_issue = self.jira.issue(issue_id)
        return jira_issue.fields.reporter.name

    def check_comment_exist(self, issue_id, new_comment):
        """Return True if an identical comment body already exists."""
        comments = [c.body for c in self.jira.comments(issue_id)]
        if new_comment in comments:
            return True
        return False

    def check_link_exist(self, issue_id, title, url):
        """Return True if a remote link with the same title and url exists."""
        links = [l.raw["object"] for l in self.jira.remote_links(issue_id)]
        for link in links:
            if link["title"] == title and link["url"] == url:
                return True
        return False

    def resolve_from_git(self, issue_id, short_commit_message, title_url, package_url):
        """Resolve an issue referenced from a git commit and record the
        commit message and the package link on it."""
        if self.check_issue_exist(issue_id):
            if not self.check_issue_state(issue_id, "resolved"):
                self.resolve_issue_to_reporter(issue_id)
                # NOTE(review): the two debug messages below look swapped
                # ("already resolve" is printed right after resolving) —
                # kept as in the source; confirm against the original file.
                self.debug_jira("Issue {0} already resolve".format(issue_id))
            else:
                self.debug_jira("Issue {0} resolved".format(issue_id))
            self.add_link(issue_id, title_url, package_url)
            self.add_comment(issue_id, short_commit_message, formatting=True)

    def refer_from_git(self, issue_id, commit_message):
        """Record a commit message that references the issue as a comment."""
        if self.check_issue_exist(issue_id):
            self.add_comment(issue_id, commit_message, formatting=True)
jira = JIRA(options, basic_auth=("<user-name>", "<api-token>")) """ Get all epics for given initiative key. Get all stories/task for each epic and add up the original estimate and time spent. JIRA does not provide accumulated estimate and time spent at epic level. We would need to look up each story/task and get those data. """ # Validate the input if len(sys.argv) == 1: print( 'No argument is supplied. Use the format "python <program name> <initiative_jira_key>"' ) quit() initiative_jira_key = sys.argv[1] issue = jira.issue(initiative_jira_key) #print('Info: the given jira key is ' + str(issue.fields.issuetype)) if (str(issue.fields.issuetype) != 'Initiative'): print('Specified argument is not the Initiative key') quit() # Get all the epics under the initiative jql = " 'Parent Link' = " + sys.argv[1] issues_in_initiative = jira.search_issues(jql) # Initialize the final list and counters final_list = [] total_initiative_original_estimate = 0 total_initiative_time_spent = 0 for epic in issues_in_initiative:
class JiraIssues(object):
    """Mirror HackerOne reports as issues in a Jira project.

    Wraps a ``jira.JIRA`` client authenticated with username/password and
    provides create/lookup/update/link/attachment helpers keyed off
    HackerOne report objects (``h1.models.Report``).
    """

    # Application descriptor attached to remote links so Jira shows where
    # the link originates.  Fixed typo: the type previously read
    # "www.hackerone.comr".
    APPLICATION = {"type": "www.hackerone.com", "name": "Hacker One"}

    # Jira wiki-markup template for an asset scope section.
    SCOPE = '''
h4.Scope
----
asset type: %(type)s
asset identifier: %(identifier)s\n'''

    # Jira wiki-markup template used to build an issue description from a
    # HackerOne report.  Fixed typo: a stray quote used to trail
    # "%(score)s".
    DESCRIPTION = '''
h4.Report Info
----
Report State: %(state)s
Reporter: %(reporter)s
Assignee: %(assignee)s
Report Created: %(created)s
Report Last Activity: %(last_activity)s
h4.Weakness
----
name: %(name)s
description: %(w_description)s
id: %(id)s
h4.Severity
----
rating: %(rating)s
score: %(score)s
h4.Description
----
%(description)s
'''

    def __init__(self, server, username, password, project):
        """Inits jira client.

        This current setup requires a jira username to be setup with the
        appropriate permissions in the jira project.

        :type server: string
        :param server: jira url
        :type username: string
        :param username: token
        :type password: string
        :param password: jira username password
        :type project: string
        :param project: jira project
        """
        self.__jira_server = server
        self.__username = username
        self.__password = password
        self.jira_project = project
        self._init_jira_client()

    def _init_jira_client(self):
        """Create the underlying JIRA client.

        Also patches a ``create_custom_field`` helper onto the JIRA class,
        because the jira library has no method for creating custom fields.
        """
        options = {'server': self.__jira_server}

        def create_custom_field(fields=None):
            # Runs with the JIRA client instance as ``self`` because the
            # function is attached to the JIRA class below.
            url = self._get_url('field')
            r = self._session.post(url, data=json.dumps(fields))
            if r.status_code != 201:
                raise JIRAError(r.status_code, request=r)
            return r

        # Jira library doesn't have a method for creating custom fields.
        setattr(JIRA, 'create_custom_field', create_custom_field)
        self.jira_client = JIRA(options, basic_auth=(self.__username, self.__password))

    def get_jira_projects(self):
        """Return all projects visible to the authenticated user."""
        return self.jira_client.projects()

    def create_project(self, key, name, jira_type="Software"):
        """Create a new Jira project with the given key and name."""
        return self.jira_client.create_project(key, name, jira_type)

    def get_jira_issue(self, report):
        """
        Return Jira Issue based on HackerOne Report issue_tracker_reference_id
        :type report: h1.models.Report
        :param report: hackerone report
        :return: Jira Issue, or None when the referenced issue no longer exists
        """
        try:
            return self.jira_client.issue(report.issue_tracker_reference_id)
        except JIRAError as e:
            # A dangling reference is expected and maps to None; anything
            # else is a real error.
            if e.text == "Issue Does Not Exist":
                return None
            else:
                raise

    @staticmethod
    def _get_jira_summary(report):
        """Build the canonical issue summary: "<report id> - <title>"."""
        return "%s - %s" % (report.id, report.title)

    def _get_jira_description(self, report):
        """Render the DESCRIPTION template from the report's fields."""
        return self.DESCRIPTION % {
            'description': report.vulnerability_information,
            'reporter': report.reporter.name,
            'assignee': report.assignee.name if report.assignee is not None else "",
            'state': report.state,
            'created': report.created_at,
            'last_activity': report.last_activity_at,
            'name': report.weakness.name,
            'w_description': report.weakness.description,
            'id': report.weakness.external_id,
            'rating': report.severity.rating,
            'score': report.severity.score
        }

    def create_jira_issue(self, report):
        """
        Create Jira Issue
        https://developer.atlassian.com/server/jira/platform/jira-rest-api-example-create-issue-7897248/
        :type report: h1.models.Report
        :param report: hackerone report
        :return: the created Jira Issue
        """
        issue_dict = {
            'project': {
                'key': self.jira_project
            },
            'summary': self._get_jira_summary(report),
            'description': self._get_jira_description(report),
            'issuetype': {
                'name': 'Bug'
            },
            'labels': ['hackerOne']
        }
        return self.jira_client.create_issue(fields=issue_dict, prefetch=True)

    def update_jira_issue(self, report, jira):
        """Push summary/description changes from the report into the issue.

        Only issues whose fields actually differ are updated.
        """
        fields = {}
        summary = self._get_jira_summary(report)
        if jira.fields.summary != summary:
            fields['summary'] = summary
        description = self._get_jira_description(report)
        if jira.fields.description != description:
            fields['description'] = description
        if fields:
            logging.info("Updating Existing Jira Issue: %s" % fields.keys())
            jira.update(fields=fields)

    def search_for_jira_issues(self, report_id):
        """
        Perform a Jira query search using JQL
        :param report_id: hacker one report id
        :return: returns jira issue match
        """
        return self.jira_client.search_issues(
            '''project = %s AND summary ~ "%s"''' % (self.jira_project, report_id),
            maxResults=1)

    def get_fields(self):
        """Return all field definitions known to the Jira instance."""
        return self.jira_client.fields()

    def create_custom_field(self, fields):
        """Create a custom field via the helper patched onto the client."""
        return self.jira_client.create_custom_field(fields)

    def get_remote_links(self, jira):
        """Return all remote links attached to the given issue."""
        return self.jira_client.remote_links(jira)

    def add_remote_link(self, report, jira, relationship="Relates"):
        """Link the HackerOne report to the issue, once per report id."""
        links = set()
        # note: all remote links have to have a global id
        for link in self.get_remote_links(jira):
            if hasattr(link, 'globalId'):
                links.add(link.globalId)
        if report.id not in links:
            destination = {'url': report.html_url, 'title': report.title}
            return self.jira_client.add_remote_link(jira, destination, report.id,
                                                    self.APPLICATION, relationship)

    def add_simple_link(self, report, jira):
        """https://developer.atlassian.com/server/jira/platform/jira-rest-api-for-remote-issue-links/"""
        link = {'url': report.html_url, 'title': report.title}
        return self.jira_client.add_simple_link(jira, object=link)

    def add_jira_attachment(self, jira, attachment, filename):
        """Add H1 Attachment in Jira

        :param jira: Jira object that has attachments
        :param attachment: hacker one attachment object content
        :param filename: attachment file name
        :return: return
        """
        return self.jira_client.add_attachment(issue=jira.id,
                                               attachment=attachment,
                                               filename=filename)

    def create_comments(self, jira, comment):
        """Add a comment to the given issue."""
        return self.jira_client.add_comment(jira, comment)
class JiraAPI(object): def __init__(self, hostname=None, username=None, password=None, path="", debug=False, clean_obsolete=True, max_time_window=12, decommission_time_window=3): self.logger = logging.getLogger('JiraAPI') if debug: self.logger.setLevel(logging.DEBUG) if "https://" not in hostname: hostname = "https://{}".format(hostname) self.username = username self.password = password self.jira = JIRA(options={'server': hostname}, basic_auth=(self.username, self.password)) self.logger.info("Created vjira service for {}".format(hostname)) self.all_tickets = [] self.excluded_tickets = [] self.JIRA_REOPEN_ISSUE = "Reopen Issue" self.JIRA_CLOSE_ISSUE = "Close Issue" self.JIRA_RESOLUTION_OBSOLETE = "Obsolete" self.JIRA_RESOLUTION_FIXED = "Fixed" self.template_path = 'vulnwhisp/reporting/resources/ticket.tpl' self.max_ips_ticket = 30 self.attachment_filename = "vulnerable_assets.txt" self.max_time_tracking = max_time_window #in months if path: self.download_tickets(path) else: self.logger.warn( "No local path specified, skipping Jira ticket download.") self.max_decommission_time = decommission_time_window #in months # [HIGIENE] close tickets older than 12 months as obsolete (max_time_window defined) if clean_obsolete: self.close_obsolete_tickets() # deletes the tag "server_decommission" from those tickets closed <=3 months ago self.decommission_cleanup() self.jira_still_vulnerable_comment = '''This ticket has been reopened due to the vulnerability not having been fixed (if multiple assets are affected, all need to be fixed; if the server is down, lastest known vulnerability might be the one reported). - In the case of the team accepting the risk and wanting to close the ticket, please add the label "*risk_accepted*" to the ticket before closing it. - If server has been decommissioned, please add the label "*server_decommission*" to the ticket before closing it. 
- If when checking the vulnerability it looks like a false positive, _+please elaborate in a comment+_ and add the label "*false_positive*" before closing it; we will review it and report it to the vendor. If you have further doubts, please contact the Security Team.''' def create_ticket(self, title, desc, project="IS", components=[], tags=[], attachment_contents=[]): labels = ['vulnerability_management'] for tag in tags: labels.append(str(tag)) self.logger.info("Creating ticket for project {} title: {}".format( project, title[:20])) self.logger.debug("project {} has a component requirement: {}".format( project, components)) project_obj = self.jira.project(project) components_ticket = [] for component in components: exists = False for c in project_obj.components: if component == c.name: self.logger.debug( "resolved component name {} to id {}".format( c.name, c.id)) components_ticket.append({"id": c.id}) exists = True if not exists: self.logger.error( "Error creating Ticket: component {} not found".format( component)) return 0 new_issue = self.jira.create_issue(project=project, summary=title, description=desc, issuetype={'name': 'Bug'}, labels=labels, components=components_ticket) self.logger.info("Ticket {} created successfully".format(new_issue)) if attachment_contents: self.add_content_as_attachment(new_issue, attachment_contents) return new_issue #Basic JIRA Metrics def metrics_open_tickets(self, project=None): jql = "labels= vulnerability_management and resolution = Unresolved" if project: jql += " and (project='{}')".format(project) self.logger.debug('Executing: {}'.format(jql)) return len(self.jira.search_issues(jql, maxResults=0)) def metrics_closed_tickets(self, project=None): jql = "labels= vulnerability_management and NOT resolution = Unresolved AND created >=startOfMonth(-{})".format( self.max_time_tracking) if project: jql += " and (project='{}')".format(project) return len(self.jira.search_issues(jql, maxResults=0)) def sync(self, vulnerabilities, 
project, components=[]): #JIRA structure of each vulnerability: [source, scan_name, title, diagnosis, consequence, solution, ips, risk, references] self.logger.info("JIRA Sync started") for vuln in vulnerabilities: # JIRA doesn't allow labels with spaces, so making sure that the scan_name doesn't have spaces # if it has, they will be replaced by "_" if " " in vuln['scan_name']: vuln['scan_name'] = "_".join(vuln['scan_name'].split(" ")) # we exclude from the vulnerabilities to report those assets that already exist with *risk_accepted*/*server_decommission* vuln = self.exclude_accepted_assets(vuln) # make sure after exclusion of risk_accepted assets there are still assets if vuln['ips']: exists = False to_update = False ticketid = "" ticket_assets = [] exists, to_update, ticketid, ticket_assets = self.check_vuln_already_exists( vuln) if exists: # If ticket "resolved" -> reopen, as vulnerability is still existent self.reopen_ticket( ticketid=ticketid, comment=self.jira_still_vulnerable_comment) self.add_label(ticketid, vuln['risk']) continue elif to_update: self.ticket_update_assets(vuln, ticketid, ticket_assets) self.add_label(ticketid, vuln['risk']) continue attachment_contents = [] # if assets >30, add as attachment # create local text file with assets, attach it to ticket if len(vuln['ips']) > self.max_ips_ticket: attachment_contents = vuln['ips'] vuln['ips'] = [ "Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment." 
.format(assets=len(attachment_contents)) ] try: tpl = template(self.template_path, vuln) except Exception as e: self.logger.error('Exception templating: {}'.format( str(e))) return 0 self.create_ticket(title=vuln['title'], desc=tpl, project=project, components=components, tags=[ vuln['source'], vuln['scan_name'], 'vulnerability', vuln['risk'] ], attachment_contents=attachment_contents) else: self.logger.info( "Ignoring vulnerability as all assets are already reported in a risk_accepted ticket" ) self.close_fixed_tickets(vulnerabilities) # we reinitialize so the next sync redoes the query with their specific variables self.all_tickets = [] self.excluded_tickets = [] return True def exclude_accepted_assets(self, vuln): # we want to check JIRA tickets with risk_accepted/server_decommission or false_positive labels sharing the same source # will exclude tickets older than 12 months, old tickets will get closed for higiene and recreated if still vulnerable labels = [ vuln['source'], vuln['scan_name'], 'vulnerability_management', 'vulnerability' ] if not self.excluded_tickets: jql = "{} AND labels in (risk_accepted,server_decommission, false_positive) AND NOT labels=advisory AND created >=startOfMonth(-{})".format( " AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking) self.excluded_tickets = self.jira.search_issues(jql, maxResults=0) title = vuln['title'] #WARNING: function IGNORES DUPLICATES, after finding a "duplicate" will just return it exists #it wont iterate over the rest of tickets looking for other possible duplicates/similar issues self.logger.info("Comparing vulnerability to risk_accepted tickets") assets_to_exclude = [] tickets_excluded_assets = [] for index in range(len(self.excluded_tickets)): checking_ticketid, checking_title, checking_assets = self.ticket_get_unique_fields( self.excluded_tickets[index]) if title.encode('ascii') == checking_title.encode('ascii'): if checking_assets: #checking_assets is a list, we add to our 
full list for later delete all assets assets_to_exclude += checking_assets tickets_excluded_assets.append(checking_ticketid) if assets_to_exclude: assets_to_remove = [] self.logger.warn( "Vulnerable Assets seen on an already existing risk_accepted Jira ticket: {}" .format(', '.join(tickets_excluded_assets))) self.logger.debug("Original assets: {}".format(vuln['ips'])) #assets in vulnerability have the structure "ip - hostname - port", so we need to match by partial for exclusion in assets_to_exclude: # for efficiency, we walk the backwards the array of ips from the scanners, as we will be popping out the matches # and we don't want it to affect the rest of the processing (otherwise, it would miss the asset right after the removed one) for index in range(len(vuln['ips']))[::-1]: if exclusion == vuln['ips'][index].split(" - ")[0]: self.logger.debug( "Deleting asset {} from vulnerability {}, seen in risk_accepted." .format(vuln['ips'][index], title)) vuln['ips'].pop(index) self.logger.debug("Modified assets: {}".format(vuln['ips'])) return vuln def check_vuln_already_exists(self, vuln): ''' This function compares a vulnerability with a collection of tickets. Returns [exists (bool), is equal (bool), ticketid (str), assets (array)] ''' # we need to return if the vulnerability has already been reported and the ID of the ticket for further processing #function returns array [duplicated(bool), update(bool), ticketid, ticket_assets] title = vuln['title'] labels = [ vuln['source'], vuln['scan_name'], 'vulnerability_management', 'vulnerability' ] #list(set()) to remove duplicates assets = list( set( re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", ",".join(vuln['ips'])))) if not self.all_tickets: self.logger.info( "Retrieving all JIRA tickets with the following tags {}". 
format(labels)) # we want to check all JIRA tickets, to include tickets moved to other queues # will exclude tickets older than 12 months, old tickets will get closed for higiene and recreated if still vulnerable jql = "{} AND NOT labels=advisory AND created >=startOfMonth(-{})".format( " AND ".join(["labels={}".format(label) for label in labels]), self.max_time_tracking) self.all_tickets = self.jira.search_issues(jql, maxResults=0) #WARNING: function IGNORES DUPLICATES, after finding a "duplicate" will just return it exists #it wont iterate over the rest of tickets looking for other possible duplicates/similar issues self.logger.info("Comparing Vulnerabilities to created tickets") for index in range(len(self.all_tickets)): checking_ticketid, checking_title, checking_assets = self.ticket_get_unique_fields( self.all_tickets[index]) # added "not risk_accepted", as if it is risk_accepted, we will create a new ticket excluding the accepted assets if title.encode('ascii') == checking_title.encode( 'ascii') and not self.is_risk_accepted( self.jira.issue(checking_ticketid)): difference = list( set(assets).symmetric_difference(checking_assets)) #to check intersection - set(assets) & set(checking_assets) if difference: self.logger.info( "Asset mismatch, ticket to update. Ticket ID: {}". format(checking_ticketid)) return False, True, checking_ticketid, checking_assets #this will automatically validate else: self.logger.info( "Confirmed duplicated. 
TickedID: {}".format( checking_ticketid)) return True, False, checking_ticketid, [ ] #this will automatically validate return False, False, "", [] def ticket_get_unique_fields(self, ticket): title = ticket.raw.get('fields', {}).get('summary').encode("ascii").strip() ticketid = ticket.key.encode("ascii") assets = [] try: affected_assets_section = ticket.raw.get( 'fields', {}).get('description').encode("ascii").split( "{panel:title=Affected Assets}")[1].split("{panel}")[0] assets = list( set( re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets_section))) except Exception as e: self.logger.error( "Ticket IPs regex failed. Ticket ID: {}. Reason: {}".format( ticketid, e)) assets = [] try: if not assets: #check if attachment, if so, get assets from attachment affected_assets_section = self.check_ips_attachment(ticket) if affected_assets_section: assets = list( set( re.findall( r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", affected_assets_section))) except Exception as e: self.logger.error( "Ticket IPs Attachment regex failed. Ticket ID: {}. Reason: {}" .format(ticketid, e)) return ticketid, title, assets def check_ips_attachment(self, ticket): affected_assets_section = [] try: fields = self.jira.issue(ticket.key).raw.get('fields', {}) attachments = fields.get('attachment', {}) affected_assets_section = "" #we will make sure we get the latest version of the file latest = '' attachment_id = '' if attachments: for item in attachments: if item.get('filename') == self.attachment_filename: if not latest: latest = item.get('created') attachment_id = item.get('id') else: if latest < item.get('created'): latest = item.get('created') attachment_id = item.get('id') affected_assets_section = self.jira.attachment(attachment_id).get() except Exception as e: self.logger.error( "Failed to get assets from ticket attachment. Ticket ID: {}. 
Reason: {}" .format(ticket, e)) return affected_assets_section def clean_old_attachments(self, ticket): fields = ticket.raw.get('fields') attachments = fields.get('attachment') if attachments: for item in attachments: if item.get('filename') == self.attachment_filename: self.jira.delete_attachment(item.get('id')) def add_content_as_attachment(self, issue, contents): try: #Create the file locally with the data attachment_file = open(self.attachment_filename, "w") attachment_file.write("\n".join(contents)) attachment_file.close() #Push the created file to the ticket attachment_file = open(self.attachment_filename, "rb") self.jira.add_attachment(issue, attachment_file, self.attachment_filename) attachment_file.close() #remove the temp file os.remove(self.attachment_filename) self.logger.info("Added attachment successfully.") except: self.logger.error("Error while attaching file to ticket.") return False return True def get_ticket_reported_assets(self, ticket): #[METRICS] return a list with all the affected assets for that vulnerability (including already resolved ones) return list( set( re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", str(self.jira.issue(ticket).raw)))) def get_resolution_time(self, ticket): #get time a ticket took to be resolved ticket_obj = self.jira.issue(ticket) if self.is_ticket_resolved(ticket_obj): ticket_data = ticket_obj.raw.get('fields') #dates follow format '2018-11-06T10:36:13.849+0100' created = [ int(x) for x in ticket_data['created'].split('.')[0].replace( 'T', '-').replace(':', '-').split('-') ] resolved = [ int(x) for x in ticket_data['resolutiondate'].split('.')[0].replace( 'T', '-').replace(':', '-').split('-') ] start = datetime(created[0], created[1], created[2], created[3], created[4], created[5]) end = datetime(resolved[0], resolved[1], resolved[2], resolved[3], resolved[4], resolved[5]) return (end - start).days else: self.logger.error( "Ticket {ticket} is not resolved, can't calculate resolution time" 
.format(ticket=ticket)) return False def ticket_update_assets(self, vuln, ticketid, ticket_assets): # correct description will always be in the vulnerability to report, only needed to update description to new one self.logger.info("Ticket {} exists, UPDATE requested".format(ticketid)) #for now, if a vulnerability has been accepted ('accepted_risk'), ticket is completely ignored and not updated (no new assets) #TODO when vulnerability accepted, create a new ticket with only the non-accepted vulnerable assets #this would require go through the downloaded tickets, check duplicates/accepted ones, and if so, #check on their assets to exclude them from the new ticket risk_accepted = False ticket_obj = self.jira.issue(ticketid) if self.is_ticket_resolved(ticket_obj): if self.is_risk_accepted(ticket_obj): return 0 self.reopen_ticket(ticketid=ticketid, comment=self.jira_still_vulnerable_comment) #First will do the comparison of assets ticket_obj.update() assets = list( set( re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", ",".join(vuln['ips'])))) difference = list(set(assets).symmetric_difference(ticket_assets)) comment = '' added = '' removed = '' #put a comment with the assets that have been added/removed for asset in difference: if asset in assets: if not added: added = '\nThe following assets *have been newly detected*:\n' added += '* {}\n'.format(asset) elif asset in ticket_assets: if not removed: removed = '\nThe following assets *have been resolved*:\n' removed += '* {}\n'.format(asset) comment = added + removed #then will check if assets are too many that need to be added as an attachment attachment_contents = [] if len(vuln['ips']) > self.max_ips_ticket: attachment_contents = vuln['ips'] vuln['ips'] = [ "Affected hosts ({assets}) exceed Jira's allowed character limit, added as an attachment." 
# NOTE(review): the method whose tail appears above add_label in the original
# chunk begins before this view; it is intentionally not reproduced here.

def add_label(self, ticketid, label):
    """Append *label* to the ticket's labels if it is not already present.

    Always returns 0; failures are logged, never raised.
    """
    ticket_obj = self.jira.issue(ticketid)
    # BUG FIX: the original compared the str label against the labels encoded
    # to bytes ([x.encode('utf8') for x in ...]); on Python 3 that membership
    # test is always False, so the label was appended repeatedly.
    if label not in ticket_obj.fields.labels:
        ticket_obj.fields.labels.append(label)
        try:
            ticket_obj.update(fields={"labels": ticket_obj.fields.labels})
            self.logger.info(
                "Added label {label} to ticket {ticket}".format(
                    label=label, ticket=ticketid))
        except Exception:  # narrowed from a bare "except:"
            self.logger.error(
                "Error while trying to add label {label} to ticket {ticket}"
                .format(label=label, ticket=ticketid))
    return 0

def remove_label(self, ticketid, label):
    """Remove *label* from the ticket's labels; log an error if absent.

    Always returns 0; failures are logged, never raised.
    """
    ticket_obj = self.jira.issue(ticketid)
    # BUG FIX: same bytes-vs-str comparison as add_label (see above).
    if label in ticket_obj.fields.labels:
        ticket_obj.fields.labels.remove(label)
        try:
            ticket_obj.update(fields={"labels": ticket_obj.fields.labels})
            self.logger.info(
                "Removed label {label} from ticket {ticket}".format(
                    label=label, ticket=ticketid))
        except Exception:  # narrowed from a bare "except:"
            # message fixed: "to ticket" -> "from ticket"
            self.logger.error(
                "Error while trying to remove label {label} from ticket {ticket}"
                .format(label=label, ticket=ticketid))
    else:
        self.logger.error(
            "Error: label {label} not in ticket {ticket}".format(
                label=label, ticket=ticketid))
    return 0

def close_fixed_tickets(self, vulnerabilities):
    '''
    Close tickets which vulnerabilities have been resolved and are still open.
    Higiene clean up affects to all tickets created by the module, filters by
    label 'vulnerability_management'
    '''
    # set instead of list: O(1) membership test per ticket below
    found_vulns = {vuln['title'] for vuln in vulnerabilities}
    comment = '''This ticket is being closed as it appears that the vulnerability no longer exists.
    If the vulnerability reappears, a new ticket will be opened.'''
    for ticket in self.all_tickets:
        if ticket.raw['fields']['summary'].strip() in found_vulns:
            self.logger.info(
                "Ticket {} is still vulnerable".format(ticket))
            continue
        self.logger.info(
            "Ticket {} is no longer vulnerable".format(ticket))
        self.close_ticket(ticket, self.JIRA_RESOLUTION_FIXED, comment)
    return 0

def is_ticket_reopenable(self, ticket_obj):
    """Return True if the ticket's Jira workflow offers the reopen transition."""
    for transition in self.jira.transitions(ticket_obj):
        if transition.get('name') == self.JIRA_REOPEN_ISSUE:
            self.logger.debug("Ticket is reopenable")
            return True
    # logger.warning: .warn() is a deprecated alias
    self.logger.warning("Ticket can't be opened. Check Jira transitions.")
    return False

def is_ticket_closeable(self, ticket_obj):
    """Return True if the ticket's Jira workflow offers the close transition."""
    for transition in self.jira.transitions(ticket_obj):
        if transition.get('name') == self.JIRA_CLOSE_ISSUE:
            return True
    # message fixed: "can't closed" -> "can't be closed"
    self.logger.warning("Ticket can't be closed. Check Jira transitions.")
    return False

def is_ticket_resolved(self, ticket_obj):
    """Return True when the ticket carries a resolution other than 'Unresolved'."""
    if ticket_obj is not None:
        if ticket_obj.raw['fields'].get('resolution') is not None:
            if ticket_obj.raw['fields'].get('resolution').get(
                    'name') != 'Unresolved':
                self.logger.debug(
                    "Checked ticket {} is already closed".format(
                        ticket_obj))
                self.logger.info("Ticket {} is closed".format(ticket_obj))
                return True
    self.logger.debug(
        "Checked ticket {} is already open".format(ticket_obj))
    return False

def is_risk_accepted(self, ticket_obj):
    """Return True when the ticket is labelled so it must be ignored.

    Matching labels: risk_accepted, server_decommission, false_positive.
    """
    if ticket_obj is not None:
        if ticket_obj.raw['fields'].get('labels') is not None:
            labels = ticket_obj.raw['fields'].get('labels')
            if "risk_accepted" in labels:
                self.logger.warning(
                    "Ticket {} accepted risk, will be ignored".format(
                        ticket_obj))
                return True
            elif "server_decommission" in labels:
                self.logger.warning(
                    "Ticket {} server decommissioned, will be ignored".
                    format(ticket_obj))
                return True
            elif "false_positive" in labels:
                self.logger.warning(
                    "Ticket {} flagged false positive, will be ignored".
                    format(ticket_obj))
                return True
    self.logger.info(
        "Ticket {} risk has not been accepted".format(ticket_obj))
    return False

def reopen_ticket(self, ticketid, ignore_labels=False, comment=""):
    """Reopen a resolved ticket (unless its risk was accepted).

    Returns 1 on a successful reopen transition, 0 otherwise.
    """
    self.logger.debug(
        "Ticket {} exists, REOPEN requested".format(ticketid))
    ticket_obj = self.jira.issue(ticketid)
    if self.is_ticket_resolved(ticket_obj):
        if (not self.is_risk_accepted(ticket_obj) or ignore_labels):
            try:
                if self.is_ticket_reopenable(ticket_obj):
                    # return value of transition_issue was unused; dropped
                    self.jira.transition_issue(
                        issue=ticketid,
                        transition=self.JIRA_REOPEN_ISSUE,
                        comment=comment)
                    self.logger.info(
                        "Ticket {} reopened successfully".format(ticketid))
                    if not ignore_labels:
                        self.add_label(ticketid, 'reopened')
                    return 1
            except Exception as e:
                # continue with ticket data so that a new ticket is created in place of the "lost" one
                self.logger.error("error reopening ticket {}: {}".format(
                    ticketid, e))
                return 0
    return 0

def close_ticket(self, ticketid, resolution, comment):
    """Close an open ticket with the given resolution.

    Returns 1 on a successful close transition, 0 otherwise.
    """
    self.logger.debug("Ticket {} exists, CLOSE requested".format(ticketid))
    ticket_obj = self.jira.issue(ticketid)
    if not self.is_ticket_resolved(ticket_obj):
        try:
            if self.is_ticket_closeable(ticket_obj):
                # need to add the label before closing the ticket
                self.add_label(ticketid, 'closed')
                self.jira.transition_issue(
                    issue=ticketid,
                    transition=self.JIRA_CLOSE_ISSUE,
                    comment=comment,
                    resolution={"name": resolution})
                self.logger.info(
                    "Ticket {} closed successfully".format(ticketid))
                return 1
        except Exception as e:
            # continue with ticket data so that a new ticket is created in place of the "lost" one
            self.logger.error("error closing ticket {}: {}".format(
                ticketid, e))
            return 0
    return 0

def close_obsolete_tickets(self):
    """Close unresolved vulnerability tickets older than max_time_tracking months."""
    self.logger.info(
        "Closing obsolete tickets older than {} months".format(
            self.max_time_tracking))
    jql = "labels=vulnerability_management AND created <startOfMonth(-{}) and resolution=Unresolved".format(
        self.max_time_tracking)
    tickets_to_close = self.jira.search_issues(jql, maxResults=0)
    comment = '''This ticket is being closed for hygiene, as it is more than {} months old.
    If the vulnerability still exists, a new ticket will be opened.'''.format(
        self.max_time_tracking)
    for ticket in tickets_to_close:
        self.close_ticket(ticket, self.JIRA_RESOLUTION_OBSOLETE, comment)
    return 0

def project_exists(self, project):
    """Return True if *project* exists on the Jira server, else False."""
    try:
        self.jira.project(project)
        return True
    except Exception:  # narrowed from bare except; unreachable trailing return removed
        return False

def download_tickets(self, path):
    '''
    Saves all tickets locally, local snapshot of vulnerability_management
    tickets from the last max_time_tracking months. Skips the download when
    today's snapshot file already exists. Returns True on success.
    '''
    check_date = str(date.today())
    fname = '{}jira_{}.json'.format(path, check_date)
    if os.path.isfile(fname):
        self.logger.info(
            "File {} already exists, skipping ticket download".format(
                fname))
        return True
    try:
        self.logger.info(
            "Saving locally tickets from the last {} months".format(
                self.max_time_tracking))
        jql = "labels=vulnerability_management AND created >=startOfMonth(-{})".format(
            self.max_time_tracking)
        tickets_data = self.jira.search_issues(jql, maxResults=0)
        # end of line needed, as writelines() doesn't add it automatically,
        # otherwise one big line
        to_save = [
            json.dumps(ticket.raw.get('fields')) + "\n"
            for ticket in tickets_data
        ]
        with open(fname, 'w') as outfile:
            outfile.writelines(to_save)
        # typo fixed: "succesfully" -> "successfully"
        self.logger.info("Tickets saved successfully.")
        return True
    except Exception as e:
        self.logger.error(
            "Tickets could not be saved locally: {}.".format(e))
        return False

def decommission_cleanup(self):
    '''
    Deletes the server_decommission tag from those tickets that have been
    closed already for more than x months (default is 3 months) in order to
    clean solved issues for statistics purposes.
    '''
    self.logger.info(
        "Deleting 'server_decommission' tag from tickets closed more than {} months ago"
        .format(self.max_decommission_time))
    jql = "labels=vulnerability_management AND labels=server_decommission and resolutiondate <=startOfMonth(-{})".format(
        self.max_decommission_time)
    decommissioned_tickets = self.jira.search_issues(jql, maxResults=0)
    comment = '''This ticket is having deleted the *server_decommission* tag, as it is more than {} months old and is expected to already have been decommissioned.
    If that is not the case and the vulnerability still exists, the vulnerability will be opened again.'''.format(
        self.max_decommission_time)
    for ticket in decommissioned_tickets:
        # we open first the ticket, as we want to make sure the process is not
        # blocked due to an unexisting jira workflow or unallowed edit from
        # closed tickets
        self.reopen_ticket(ticketid=ticket, ignore_labels=True)
        self.remove_label(ticket, 'server_decommission')
        self.close_ticket(ticket, self.JIRA_RESOLUTION_FIXED, comment)
    return 0
class JIRA_PROJECT(object):
    """Thin wrapper around a Jira connection scoped to a single project."""

    def __init__(self, server, project, basic_auth=()):
        self.server = server
        self.project = project
        self.basic_auth = basic_auth
        self._connect_to_jira()

    def _connect_to_jira(self):
        """Open the Jira connection; exit the process on failure."""
        try:
            # typo fixed ("Tyring"); prints converted to py3-style, matching
            # the rest of the file
            print('Trying to connect JIRA server: {} and project: {}...'.format(self.server, self.project))
            self.jira = JIRA(server=self.server, basic_auth=self.basic_auth)
            print('JIRA server connected successfully...')
        except Exception as err:
            print('JIRA Connection error')
            print(err)
            sys.exit(1)

    def get_defect_from_qc(self, qc_num):
        """Return the key of the TDETS defect whose QC field contains *qc_num*.

        Exits when this wrapper is not bound to the TDETS project; returns
        None when no defect matches.
        """
        # BUG FIX: original used "is not 'TDETS'" — identity comparison with a
        # string literal is implementation-defined; use != for equality.
        if self.project != 'TDETS':
            print('Project is not TDETS')
            sys.exit(0)
        defects = self.jira.search_issues('project={}'.format(self.project), maxResults=5000)
        for defect in defects:
            print('checking', defect.key)
            defect = self.jira.issue(defect.key, fields='customfield_13625,summary,status,assignee')
            if (defect.fields.customfield_13625) and (qc_num in defect.fields.customfield_13625):
                print(defect.key)
                print(defect.fields.customfield_13625)
                print(defect.fields.summary)
                print(defect.fields.status)
                print(defect.fields.assignee)
                return defect.key
        else:
            # for/else: runs when the loop finishes without returning
            print('no matching QC found in TDETS')
            return None

    def get_my_defects(self):
        """Return DEFECT_SUMMARY records for defects assigned to the current user."""
        defects = self.jira.search_issues('project={} and assignee = currentUser()'.format(self.project))
        list_defects = []
        for defect in defects:
            defect = self.jira.issue(defect.key, fields='assignee,summary,status,customfield_13625,comment')
            if defect.fields.comment.comments:
                last_comment = defect.fields.comment.comments[-1].raw['body']
            else:
                last_comment = 'No Comment Yet'
            # BUG FIX: original called getattr on the *string* 'defect.fields',
            # so it always returned the 'NO QC ID' default.
            qc_id = getattr(defect.fields, 'customfield_13625', 'NO QC ID')
            defect_summary = DEFECT_SUMMARY(id=defect.key,
                                            qc_id=qc_id,
                                            summary=defect.fields.summary,
                                            status=defect.fields.status,
                                            assignee=defect.fields.assignee,
                                            last_comment=last_comment,
                                            )
            list_defects.append(defect_summary)
        return list_defects

    def get_all_issues(self):
        pass

    def create_new_issue(self):
        pass

    def update_defect(self, defect, new_comment=None, new_assignee=None):
        """Post a comment and/or reassign *defect*.

        Returns True when every requested change succeeded, False otherwise.
        https://answers.atlassian.com/questions/8627641/update-custom-field-using-jira-python
        """
        if new_comment:
            comment = self.jira.add_comment(defect, new_comment)
            if isinstance(comment, jira.resources.Comment):
                print('Posted Comment:')
                print(comment.raw['body'])
            else:
                print('Failed')
                return False
        if new_assignee:
            if not self.jira.assign_issue(defect, new_assignee):
                return False
        return True

    def get_defect_history(self, defect):
        """Print every status change recorded in the defect's changelog."""
        # https://answers.atlassian.com/questions/64708/is-it-possible-to-get-the-issue-history-using-the-rest-api
        defect = self.jira.issue(defect, expand='changelog')
        for history in defect.changelog.histories:
            for item in history.items:
                if item.field == 'status':
                    print('Date:' + history.created + ' From:' + item.fromString + ' To:' + item.toString)

    def update_issues_from_file(self, filename):
        pass
# ---------------------------------------------------------------------------
# create_issue: polls an IMAP inbox for unread Jira notification mails; for
# html mails matching a WBSGantt "created" issue template it extracts the
# issue summary/version, creates a matching ALM defect via
# ImHandler.create_defect, and writes the ALM id back into the Jira issue
# field customfield_10400. On failure it mails an error report via SendMail.
#
# NOTE(review): Python 2 print statements. Credentials (IMAP, Jira, ALM) are
# hard-coded in source — move to config/secret storage. `mailBody` is only
# assigned when messageObj.html_part exists; presumably the parsing code is
# nested under that branch, but the collapsed formatting makes the exact
# nesting unverifiable — confirm before refactoring. Code below is preserved
# byte-for-byte; only this header comment was added.
# ---------------------------------------------------------------------------
def create_issue(): imapObj = imapclient.IMAPClient('imaphz.qiye.163.com', ssl=True) imapObj.login('*****@*****.**', 'bBDCP5ZKQ72QUzcP') imapObj.select_folder('INBOX') InstanceJira = JIRA('http://172.16.11.219:8080', auth=('guxiaocong', '123456')) UIDS = [] UIDS = imapObj.search('UNSEEN') # get all unread mails and store into UIDs print 'start to search mail.....' rawMessages = imapObj.fetch( UIDS, ['BODY[]']) # get all ID of each mails according to the UIDs if len(UIDS) != 0: for num in rawMessages: messageObj = pyzmail.PyzMessage.factory(rawMessages[num]['BODY[]']) if messageObj.text_part: print 'text' else: if (messageObj.html_part): mailBody = messageObj.html_part.get_payload().decode( messageObj.html_part.charset) # pprint.pprint(mailBody) print 'html' if mailBody.find('<strong>created</strong>' ) != -1 and mailBody.find( 'Issue template (WBSGantt)') != -1: searchsummary_start = mailBody.find('T2M_BUG_SYS') summaryStart = mailBody.find('href', searchsummary_start) summaryEnd = mailBody.find('"', summaryStart + 10) almsummary = mailBody[summaryStart + 6:summaryEnd] searchAffectsVersion_start = mailBody.find( 'Affects Versions:') versionStart = mailBody.find( 'top', searchAffectsVersion_start) versionEnd = mailBody.find('<', versionStart) almversion = mailBody[versionStart + 7:versionEnd] jirabugid = almsummary[almsummary. rfind('/', 0, len(almsummary)) + 1:len(almsummary)] almbug_id, out, err = ImHandler.create_defect( "/TCT/QCT SDM450/T2-PM45 PM85 P upgrade", almsummary.strip(), almversion.strip(), "pm85p-devel", 'yan.han', '1Q2w3e4r!') print('Alm bug created: %s' % almbug_id) if almbug_id != None: #start to fill in ALM ID issue = InstanceJira.issue(jirabugid) issue.update( fields={'customfield_10400': almbug_id}) print( 'Jira field updated. 
Jira ID: %s, Alm ID: %s' % (jirabugid, almbug_id)) else: print 'cannot find the almbug of: ' + jirabugid SendMail.send_email_by_smtp( 'cannot find the almbug of: ' + jirabugid, 'Error on bug %s\n====> stdout\n%s\n====>stderr\n%s' % (jirabugid, out, err), '*****@*****.**') else: print "this is not a bug creation" imapObj.logout()
# ---------------------------------------------------------------------------
# update_tickets_from_git: burlap deployment task. Collects ticket ids from
# commits between the last deployed commit and the current one, comments on
# each ticket, then repeatedly applies the configured deploy-workflow status
# transition and reassignment until no further transition applies for the
# ticket's current status. Guards: runs only on the last host of the role,
# and aborts when update_from_git / ticket_pattern / credentials are unset.
#
# NOTE(review): `JIRA({'server': ...}, basic_auth=...)` passes an options
# dict as the first positional argument — that matches only old python-jira
# versions where the first parameter was `options` (newer versions expect a
# server URL string, cf. the `JIRA(options={'server': url}, ...)` call style
# used elsewhere in this file) — verify against the pinned jira version.
# Original line breaks fall mid-statement, so the code below is preserved
# byte-for-byte; only this header comment was added.
# ---------------------------------------------------------------------------
def update_tickets_from_git(self, from_commit=None, to_commit=None): """ Find all ticket numbers and update their status in Jira. Run during a deployment. Looks at all commits between now and the last deployment. """ from jira import JIRA, JIRAError from burlap.git import gittracker, CURRENT_COMMIT r = self.local_renderer # get_current_commit = gittracker.get_current_commit # GITTRACKER = gittracker.name.upper() # Ensure this is only run once per role. if self.genv.host_string != self.genv.hosts[-1]: self.vprint('Not first server. Aborting.') return self.vprint('self.env.update_from_git:', self.env.update_from_git) self.vprint('self.genv.jirahelper_update_from_git:', self.genv.jirahelper_update_from_git) if not self.env.update_from_git: self.vprint('Update from git disabled. Aborting.') return if not self.env.ticket_pattern: self.vprint('No ticket pattern defined. Aborting.') return if not self.env.basic_auth_username or not self.env.basic_auth_password: self.vprint('Username or password not given. Aborting.') return # During a deployment, we should be given these, but for testing, # lookup the diffs dynamically. last = gittracker.last_manifest current = gittracker.current_manifest last_commit = from_commit or last.current_commit#[CURRENT_COMMIT] print('last_commit:', last_commit) current_commit = to_commit or current[CURRENT_COMMIT] print('current_commit:', current_commit) if not last_commit or not current_commit: print('Missing commit ID. Aborting.') return self.vprint('-'*80) self.vprint('last.keys:', last.keys()) self.vprint('-'*80) self.vprint('current.keys:', current.keys()) # try: # last_commit = last['GITTRACKER']['current_commit'] # except KeyError: # return # current_commit = current['GITTRACKER']['current_commit'] # Find all tickets deployed between last deployment and now. tickets = self.get_tickets_between_commits(current_commit, last_commit) self.vprint('tickets:', tickets) # Update all tickets in Jira. 
jira = JIRA({ 'server': self.env.server }, basic_auth=(self.env.basic_auth_username, self.env.basic_auth_password)) for ticket in tickets: # Mention this Jira updated. r.env.role = r.genv.ROLE.lower() comment = r.format(self.env.ticket_update_message_template) print('Commenting on ticket %s: %s' % (ticket, comment)) if not self.dryrun: jira.add_comment(ticket, comment) # Update ticket status. recheck = False while 1: print('Looking up jira ticket %s...' % ticket) issue = jira.issue(ticket) self.vprint('Ticket %s retrieved.' % ticket) transition_to_id = dict((t['name'], t['id']) for t in jira.transitions(issue)) self.vprint('%i allowable transitions found:' % len(transition_to_id)) if self.verbose: pprint(transition_to_id) self.vprint('issue.fields.status.id:', issue.fields.status.id) self.vprint('issue.fields.status.name:', issue.fields.status.name) jira_status_id = issue.fields.status.name.title() self.vprint('jira_status_id:', jira_status_id) next_transition_name = self.env.deploy_workflow.get(jira_status_id) self.vprint('next_transition_name:', next_transition_name) next_transition_id = transition_to_id.get(next_transition_name) self.vprint('next_transition_id:', next_transition_id) if next_transition_name: if issue.fields.assignee: if issue.fields.assignee.raw: assignee_name = issue.fields.assignee.name else: # Get assignee name directly # https://community.atlassian.com/t5/Jira-questions/Jira-in-Python-issue-fields-reporter-name- # errors-with-TypeError/qaq-p/937924 assignee_name = issue.fields.assignee._session['name'] else: assignee_name = None # Get new assignee by status new_assignee = self.env.assignee_by_status.get( #issue.fields.status.name.title(), next_transition_name, assignee_name, ) # If assigning to reporter, get reporter name. 
if new_assignee == 'reporter': if issue.fields.reporter.raw: new_assignee = issue.fields.reporter.name else: # Get reporter name directly # https://community.atlassian.com/t5/Jira-questions/Jira-in-Python-issue-fields-reporter-name- # errors-with-TypeError/qaq-p/937924 new_assignee = issue.fields.reporter._session['name'] print('Updating ticket %s to status %s (%s) and assigning it to %s.' % (ticket, next_transition_name, next_transition_id, new_assignee)) if not self.dryrun: if next_transition_id: try: jira.transition_issue(issue, next_transition_id) recheck = True except AttributeError as e: print('Unable to transition ticket %s to %s: %s' % (ticket, next_transition_name, e), file=sys.stderr) traceback.print_exc() # Note assignment should happen after transition, since the assignment may # effect remove transitions that we need. try: if new_assignee: print('Assigning ticket %s to %s.' % (ticket, new_assignee)) jira.assign_issue(issue, new_assignee) else: print('No new assignee found.') except JIRAError as e: print('Unable to reassign ticket %s to %s: %s' % (ticket, new_assignee, e), file=sys.stderr) else: recheck = False print('No transitions found for ticket %s currently in status "%s".' % (ticket, issue.fields.status.name)) if not recheck: break
# ---------------------------------------------------------------------------
# execute: CLI entry point for the Battlefy initiative roll-up tool. Connects
# to Jira, resolves the initiative list (explicit --initiatives or every
# FRONT epic except the ops epic), computes estimation roll-ups per
# initiative, then optionally: writes estimates back to the initiative
# issues, exports JSON, builds a per-calendar-month capacity distribution
# (pro-rating each epic's summed/remaining time across the months between
# its start and due dates), and updates a hard-coded Google Sheet.
#
# NOTE(review): if neither --initiatives nor --auto-initiatives is supplied,
# `initiatives` is never bound and the first loop raises NameError — consider
# defaulting to []. The JQL excludes "Front-15" while the comment says
# FRONT-15 — confirm Jira matches issue keys case-insensitively here. The
# spreadsheet URL and root cell are hard-coded; presumably intentional for
# this internal tool. Original line breaks fall mid-statement and even
# mid-comment, so the code below is preserved byte-for-byte; only this
# header comment was added.
# ---------------------------------------------------------------------------
def execute(args_list): args = parse_args(args_list) print("Running JIRA Tabulations for Initiatives") jira_options = {"server": "https://battlefy.atlassian.net"} jira = JIRA( options=jira_options, basic_auth=(args.user, args.api_token), ) if args.initiatives is not None: initiatives = args.initiatives.split(",") elif args.auto_initiatives: # FRONT-15 is the ops epic. query_string = "project=FRONT and type=Epic and id!=Front-15" initiatives = list( set([e.key for e in jira.search_issues(query_string)])) initiatives_container = [] for initiative in initiatives: print("Obtaining roll-up for {}".format(initiative)) initiative_issue = jira.issue(initiative) keys = [ x.inwardIssue.key for x in initiative_issue.fields.issuelinks if hasattr(x, 'inwardIssue') and 'FRONT' not in x.inwardIssue.key and 'SALES' not in x.inwardIssue.key ] filtered_keys = [] curr_initiative = None if initiative_issue.fields.status.name == 'Done': continue if initiative_issue.fields.status.name == 'Initial Estimation': curr_initiative = calculate_initial_estimation( initiative_issue, INITIAL_TIME_KEY, args.story_point_weight, args.story_point_weight_ceiling) else: filtered_keys.extend(keys) curr_initiative = calculate_estimation( args_list, filtered_keys, initiative_issue, args.story_point_weight, args.story_point_weight_ceiling) initiatives_container.append(curr_initiative) if args.update_initiative_estimates: # update the SP estimate on the initiatives for initiative in initiatives_container: print("Updating initiative: {}".format(initiative.initiative.key)) initiative.initiative.update( fields={INITIAL_TIME_KEY: initiative.summed_time}) initiative.initiative.update( fields={REMAINING_TIME_KEY: initiative.remaining_time}) initiative.initiative.update(fields={ CONFIDENCE_INTERVAL_KEY: int(initiative.estimation_confidence) }) initiative.initiative.update( fields={ INCOMPLETE_ISSUE_COUNT_KEY: initiative.incomplete_estimated_count + initiative.incomplete_unestimated_count }) if 
args.export_estimates: export_initiatives_json(args.export_estimates_path, initiatives_container) month_distributions = {} if args.create_calendar_schedule: skipped_epics = [] print("Calculating calendar rooted capacity demand...") for initiative in initiatives_container: for epic in initiative.epics: initiative_start_date_object = None initiative_end_date_object = None start_date_object = None end_date_object = None # Confirm we have an initiative start date; if we don't have that all bets are off anyways # check start date of epic; if we don't have that, we yield to the start date of the initiative # if we do have it, we still need to sanity check that the epic doesn't start before the initiatve; if so assume the start date is the # initiative. # If we don't have that, we set the start date to the same month as the end_date if getattr(initiative.initiative.fields, START_DATE_KEY) is None: skipped_epics.append(epic) continue else: initiative_start_date_object = datetime.datetime.strptime( getattr(initiative.initiative.fields, START_DATE_KEY), "%Y-%m-%d") if getattr(epic.epic.fields, START_DATE_KEY) is not None: start_date_object = datetime.datetime.strptime( getattr(epic.epic.fields, START_DATE_KEY), "%Y-%m-%d") if start_date_object < initiative_start_date_object: start_date_object = initiative_start_date_object if epic.epic.fields.duedate is not None: end_date_object = datetime.datetime.strptime( epic.epic.fields.duedate, "%Y-%m-%d") elif initiative.initiative.fields.duedate is not None: end_date_object = datetime.datetime.strptime( initiative.initiative.fields.duedate, "%Y-%m-%d") else: skipped_epics.append(epic) continue # TODO - but... 
whyyyyyyyy ;-; if getattr(epic.epic.fields, START_DATE_KEY) is None: start_date_object = datetime.datetime( end_date_object.year, end_date_object.month, end_date_object.day) total_delta_days = (end_date_object - start_date_object).days + 1 summed_calc_total_delta_days = ( end_date_object - datetime.datetime.today() ).days + 1 if start_date_object < datetime.datetime.today( ) and datetime.datetime.today( ) < end_date_object else total_delta_days delta_months = diff_month(end_date_object, start_date_object) + 1 itr_date = start_date_object for i in range(delta_months): end_date = get_current_month_end_date( itr_date, end_date_object) micro_delta_days = (end_date - itr_date).days + 1 month_distribution_key = str(itr_date.year) + "-" + str( itr_date.month) if month_distribution_key not in month_distributions: month_distributions[ month_distribution_key] = MonthWorkload( itr_date.month, [], [], []) month_distributions[month_distribution_key].epics.append( epic) month_distributions[ month_distribution_key].summed_time.append( EpicIntervalCommitment( initiative.initiative.fields.summary, epic.epic.fields.summary, round( float( epic.summed_time * (micro_delta_days / total_delta_days)), 2))) # remaining time is only pertinent for the section of time after today() if (itr_date < datetime.datetime.today()): # adjust for the case where we are currently calculating this month, wherein we want to provide some partial # counting; if end_date < today, we are over and all work must be done. 
if (itr_date.month == datetime.datetime.today().month ) and (itr_date.year == datetime.datetime.today().year): micro_delta_days = ( end_date - datetime.datetime.today()).days + 1 if ( end_date_object - datetime.datetime.today() ).days > 0 else summed_calc_total_delta_days else: micro_delta_days = 0 # if this is the last loop, and the nested epic is over, we need to just frontload the entire remaining work into next month month_distributions[ month_distribution_key].remaining_time.append( EpicIntervalCommitment( initiative.initiative.fields.summary, epic.epic.fields.summary, round( float(epic.remaining_time * (micro_delta_days / summed_calc_total_delta_days)), 2))) itr_date = get_next_month_start_date(itr_date, 1) # serialize calendar plan export_capacity_calendar(args.export_estimates_path, month_distributions) print("Calendar render complete.") if args.update_sheets: print("Updating the google sheet...") scope = [ 'https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive' ] credentials = ServiceAccountCredentials.from_json_keyfile_name( args.sheets_service_auth_file, scope) gc = gspread.authorize(credentials) spreadsheet_instance = gc.open_by_url( "https://docs.google.com/spreadsheets/d/1NvyO1Wj-cCMEGwHpPkFgGl14Kdpmz2QR8NDNx18FBAo/edit?usp=sharing" ) alloc = spreadsheet_instance.sheet1 root_cell_row = 50 root_cell_col = 'H' years = [2020, 2021] months = list(range(1, 13)) counter = 0 for year in years: for month in months: new_row = root_cell_row + counter new_cell = '{}{}'.format(root_cell_col, new_row) month_distribution_key = '{}-{}'.format(year, month) if month_distribution_key in month_distributions: ret_dict = month_distributions[ month_distribution_key].dict() alloc.update_acell(new_cell, ret_dict['remaining_time_summary']) counter += 1 alloc.update_acell('G49', 'Updated On: {}'.format(datetime.datetime.today()))