# Example no. 1
def compare_data(client, data):
    """
    Compare an upstream data dict against its downstream JIRA issue.

    Looks up the JIRA issue referenced by ``data['JIRA']`` and verifies
    that tags, fixVersions, assignee, title and description all match,
    raising on the first mismatch.

    :param jira.client.JIRA client: JIRA client
    :param Dict data: Data used to compare against; expects the keys
        'JIRA', 'tags', 'fixVersions', 'assignee', 'title', 'description'
    :return: None if all fields match
    :raises Exception: if zero or multiple issues match the ticket key,
        or if any compared field differs
    """
    # First get our existing JIRA issue
    jira_ticket = data['JIRA']
    existing = client.search_issues(f"Key = {jira_ticket}")

    # Exactly one issue must match: an empty result previously fell
    # through to existing[0] and raised a bare IndexError
    if not existing:
        raise Exception(f"No issue was found with ticket {jira_ticket}")
    if len(existing) > 1:
        raise Exception(
            f"Too many issues were found with ticket {jira_ticket}")

    existing = existing[0]
    # Lazy %-style args instead of eager concatenation
    log.info("TEST - %s", existing.fields.summary)

    # Check Tags
    if data['tags'] != existing.fields.labels:
        raise Exception(f"Error when comparing tags for {jira_ticket}\n"
                        f"Expected: {data['tags']}\n"
                        f"Actual: {existing.fields.labels}")

    # Check FixVersion
    formatted_fixVersion = format_fixVersion(existing.fields.fixVersions)

    if data['fixVersions'] != formatted_fixVersion:
        raise Exception(f"Error when comparing fixVersions for {jira_ticket}\n"
                        f"Expected: {data['fixVersions']}\n"
                        f"Actual: {formatted_fixVersion}")

    # Check Assignee (downstream issues can be unassigned)
    if not existing.fields.assignee:
        raise Exception(f"Error when comparing assignee for {jira_ticket}\n"
                        f"Expected: {data['assignee']}\n"
                        f"Actual: {existing.fields.assignee}")

    elif data['assignee'] != existing.fields.assignee.name:
        raise Exception(f"Error when comparing assignee for {jira_ticket}\n"
                        f"Expected: {data['assignee']}\n"
                        f"Actual: {existing.fields.assignee.name}")

    # Check Title
    if data['title'] != existing.fields.summary:
        raise Exception(f"Error when comparing title for {jira_ticket}\n"
                        f"Expected: {data['title']}\n"
                        f"Actual: {existing.fields.summary}")

    # Check Descriptions, ignoring all whitespace differences.
    # A None downstream description (empty issue body in JIRA) is treated
    # as "" — the old chained .replace() calls crashed on None.
    def _squash(text):
        # Strip spaces, newlines and carriage returns in a single pass
        return (text or "").translate(str.maketrans("", "", " \n\r"))

    if _squash(data['description']) != _squash(existing.fields.description):
        raise Exception(
            f"Error when comparing descriptions for {jira_ticket}\n"
            f"Expected: {data['description']}\n"
            f"Actual: {existing.fields.description}")
# Example no. 2
def check_jira_status(client):
    """
    Test whether the JIRA server is up and answering queries.

    Runs a trivial remote-link search; a healthy server returns at
    least one issue.

    :param jira.client.JIRA client: JIRA client
    :return: True/False if the server is up
    :rtype: Bool
    """
    # Any hit at all means the server answered the probe query
    matches = client.search_issues("issueFunction in linkedIssuesOfRemote('*')")
    return len(matches) >= 1
# Example no. 3
def _get_existing_jira_issue_legacy(client, issue, config):
    """
    This is our old way of matching issues: use the special url field.
    This will be phased out and removed in a future release.

    """

    kwargs = dict(issue.downstream.items())
    kwargs["External issue URL"] = "%s" % issue.url
    kwargs = sorted(kwargs.items(), key=operator.itemgetter(0))

    query = " AND ".join([
        "=".join(["'%s'" % k, "'%s'" % v]) for k, v in kwargs
        if v is not None
    ]) + " AND (resolution is null OR resolution = Duplicate)"
    results = client.search_issues(query)
    if results:
        return results[0]
    else:
        return None
# Example no. 4
def matching_jira_issue_query(client, issue, config, free=False):
    """
    API calls that find matching JIRA tickets if any are present
    Args:
        client (jira.client.JIRA): JIRA client
        issue (jibe.intermediary.Issue): Issue object
        config (dict): Config dict
        free (Bool): Free tag to add 'statusCategory != Done' to query
    Returns:
        results (lst): Returns a list of matching JIRA issues if any are found
    """
    # Searches for any remote link to the issue.url
    query = 'issueFunction in linkedIssuesOfRemote("%s") and ' \
        'issueFunction in linkedIssuesOfRemote("%s")' % (
            remote_link_title, issue.url)
    if free:
        query += ' and statusCategory != Done'
    # Query the JIRA client and store the results
    results_of_query = client.search_issues(query)

    # Zero or one hit needs no disambiguation
    if len(results_of_query) <= 1:
        return results_of_query

    # Sometimes if an issue gets dropped it is created with the
    # url: pagure.com/something/issue/5
    # Then when that issue is dropped and another one is
    # created it is created with the same
    # url: pagure.com/something/issue/5.
    # We need to ensure that we are not catching a dropped issue
    # Loop through the results of the query and make sure the ids match
    final_results = []
    for result in results_of_query:
        # Empty JIRA fields come back as None; normalize them so the
        # `in` test below cannot raise TypeError (matches the guard in
        # _matching_jira_issue_query)
        description = result.fields.description or ""
        summary = result.fields.summary or ""
        # If the queried JIRA issue has the id of the upstream
        # issue or the same title
        if issue.id in description or issue.title == summary or \
                issue.upstream_title == summary:
            search = check_comments_for_duplicate(
                client, result, find_username(issue, config))
            if search is True:
                final_results.append(result)
            else:
                # Else search returned a linked issue
                final_results.append(search)
        # If that's not the case, check if they have the same upstream title
        # Upstream username/repo can change if repos are merged.
        # Regex normalized to the one used by _matching_jira_issue_query;
        # the old raw string embedded a literal backslash-newline and
        # indentation spaces inside the character class (raw strings do
        # not treat backslash-newline as a continuation).
        elif re.search(r"\[[a-zA-Z0-9!@#$%^&*()_+\-=\[\]{};':\\|,.<>\/?]*\] "
                       + issue.upstream_title, summary):
            search = check_comments_for_duplicate(
                client, result, find_username(issue, config))
            if search is True:
                # We went through all the comments and didn't find anything
                # that indicated it was a duplicate
                log.warning('   Matching downstream issue '
                            '%s to upstream issue %s' %
                            (result.fields.summary, issue.title))
                final_results.append(result)
            else:
                # Else search returned a linked issue
                final_results.append(search)
    if not final_results:
        # Just return the most updated issue: sort descending on the
        # 'updated' timestamp (the previous ascending sort + [0]
        # actually picked the LEAST recently updated result)
        results_of_query.sort(key=lambda x: datetime.strptime(
            x.fields.updated, '%Y-%m-%dT%H:%M:%S.%f+0000'), reverse=True)
        final_results.append(results_of_query[0])

    # Return the final_results
    log.debug("Found %i results for query %r", len(final_results), query)
    return final_results
# Example no. 5
def _matching_jira_issue_query(client, issue, config, free=False):
    """
    API calls that find matching JIRA tickets if any are present.

    :param jira.client.JIRA client: JIRA client
    :param sync2jira.intermediary.Issue issue: Issue object
    :param Dict config: Config dict
    :param Bool free: Free tag to add 'statusCategory != Done' to query
    :returns: results: Returns a list of matching JIRA issues if any are found
    :rtype: List
    """
    # Search by summary or by any text mentioning the upstream URL.
    # Brackets/quotes/parens are stripped from the title first —
    # presumably because they break the quoted JQL term; confirm
    # against the JQL escaping rules.
    issue_title = issue.title.replace('[', '').replace(']', '').replace('"', '').replace('(', '').replace(')', '')
    query = f'summary ~ "{issue_title}" OR text ~ "{issue.url}"'
    if free:
        # Honor the documented flag (it was previously accepted but
        # silently ignored in this variant). Parenthesized so the
        # clause applies to both OR branches, since AND binds tighter
        # than OR in JQL.
        query = f'({query}) and statusCategory != Done'

    # Query the JIRA client and store the results
    results_of_query = client.search_issues(query)

    # Zero or one hit needs no disambiguation
    if len(results_of_query) <= 1:
        return results_of_query

    # Sometimes if an issue gets dropped it is created with the url: pagure.com/something/issue/5
    # Then when that issue is dropped and another one is created it is created with the same
    # url : pagure.com/something/issue/5.
    # We need to ensure that we are not catching a dropped issue
    # Loop through the results of the query and make sure the ids match
    final_results = []

    for result in results_of_query:
        # Empty JIRA fields come back as None; normalize before matching
        description = result.fields.description or ""
        summary = result.fields.summary or ""
        if issue.id in description or issue.title == summary:
            search = check_comments_for_duplicate(client, result,
                                                  find_username(issue, config))
            if search is True:
                final_results.append(result)
            else:
                # Else search returned a linked issue
                final_results.append(search)
        # If that's not the case, check if they have the same upstream title
        # Upstream username/repo can change if repos are merged
        elif re.search(r"\[[a-zA-Z0-9!@#$%^&*()_+\-=\[\]{};':\\|,.<>\/?]*\] "
                       + issue.upstream_title,
                       summary):
            # `summary` (None-guarded) instead of result.fields.summary,
            # which would crash re.search on an empty summary
            search = check_comments_for_duplicate(client, result,
                                                  find_username(issue, config))
            if search is True:
                # We went through all the comments and didn't find anything
                # that indicated it was a duplicate
                log.warning('Matching downstream issue %s to upstream issue %s' %
                            (result.fields.summary, issue.title))
                final_results.append(result)
            else:
                # Else search returned a linked issue
                final_results.append(search)
    if not final_results:
        # Just return the most updated issue: sort descending on the
        # 'updated' timestamp (the previous ascending sort + [0]
        # actually picked the LEAST recently updated result)
        results_of_query.sort(key=lambda x: datetime.strptime(
            x.fields.updated, '%Y-%m-%dT%H:%M:%S.%f+0000'), reverse=True)
        final_results.append(results_of_query[0])

    # Return the final_results
    log.debug("Found %i results for query %r", len(final_results), query)

    return final_results