Example #1
0
 def __init__(self):
     """Create an anonymous Red Hat Bugzilla client.

     Cookies, token files and credentials are all explicitly disabled
     (``use_creds=False``), so only unauthenticated API calls are possible.
     """
     self.bz = bugzilla.RHBugzilla(
         url=self.BUGZILLA_API_URL,
         cookiefile=None,
         tokenfile=None,
         use_creds=False,
     )
Example #2
0
def connect(url, user, password):
    '''Connect to bugzilla, return the bugzilla connection.

    :param url: XML-RPC endpoint of the Bugzilla instance.
    :param user: Bugzilla login name.
    :param password: Bugzilla password.
    :raises RuntimeError: if a connection object could not be created.
    '''
    ret = bugzilla.RHBugzilla(url=url, user=user, password=password)
    if not ret:
        # BUG FIX: this message previously referenced the undefined name
        # `username`, so the failure path itself raised a NameError.
        raise RuntimeError("Couldn't connect to bugzilla: %s %s %s" % (url, user, password))
    # NOTE(review): both messages log the password in clear text -- consider
    # dropping it from the output.
    logger.debug('connected to bugzilla: %s %s %s %s', url, user, password, ret)
    return ret
Example #3
0
def _get_bugzilla_bug(bug_id):
    """Fetch bug ``bug_id``.

    Results are memoized in the module-level ``_bugzilla`` cache, so each
    bug is fetched from the server at most once per process.

    :param int bug_id: The ID of a bug in the Bugzilla database.
    :return: A bug object as returned by python-bugzilla's ``getbugsimple``.
    :raises BugFetchError: If an error occurs while fetching the bug. For
        example, a network timeout occurs or the bug does not exist.

    """
    # Is bug ``bug_id`` in the cache?
    if bug_id in _bugzilla:
        LOGGER.debug('Bugzilla bug {0} found in cache.'.format(bug_id))
    else:
        LOGGER.info('Bugzilla bug {0} not in cache. Fetching.'.format(bug_id))
        # Make a network connection to the Bugzilla server.
        try:
            bz_conn = bugzilla.RHBugzilla()
            bz_conn.connect(BUGZILLA_URL)
        except (TypeError, ValueError):
            raise BugFetchError(
                'Could not connect to {0}'.format(BUGZILLA_URL))
        # Fetch the bug and place it in the cache.
        try:
            _bugzilla[bug_id] = bz_conn.getbugsimple(bug_id)
        except Fault as err:
            # XML-RPC fault from the server (e.g. the bug does not exist).
            raise BugFetchError('Could not fetch bug. Error: {0}'.format(
                err.faultString))
        except ExpatError as err:
            # The server's response could not be parsed as XML.
            raise BugFetchError('Could not interpret bug. Error: {0}'.format(
                ErrorString(err.code)))

    return _bugzilla[bug_id]
Example #4
0
def is_bzbug(bz_id):
    """Return True when Bugzilla bug *bz_id* is open (NEW or ASSIGNED).

    Also returns True when the bug state cannot be determined (server fault
    or repeated parse failures); returns False when the connection itself
    cannot be established or the bug is in any other state.
    """
    try:
        conn = bugzilla.RHBugzilla()
        conn.connect(BUGZILLA_URL)
    except (TypeError, ValueError):
        logging.warning("Invalid Bugzilla ID {0}".format(bz_id))
        return False

    # Retry the fetch up to three times on XML parse errors.
    bug = None
    attempts = 0
    while bug is None and attempts < 3:
        try:
            bug = conn.getbugsimple(bz_id)
        except ExpatError:
            attempts += 1
        except Fault:
            # Server-side fault: treat the bug as open to be safe.
            return True

    if bug is None:
        return True

    if bug.status in ('NEW', 'ASSIGNED'):
        logging.debug(bug)
        return True
    return False
Example #5
0
 def _from_bugzilla(self, bug_id):
     """Return the summary of Red Hat Bugzilla bug *bug_id*.

     Returns '' when the bugzilla module is unavailable.
     """
     if bugzilla is None:
         return ''
     client = bugzilla.RHBugzilla(url="https://bugzilla.redhat.com/xmlrpc.cgi")
     matches = client.query(client.build_query(bug_id=bug_id))
     return matches[0].summary
Example #6
0
    def testPostTranslation(self):
        """Check RHBugzilla.post_translation()'s field rewriting.

        ``post_translation`` must be idempotent: applying it twice to the
        same dict yields the same result.  Bugzilla3 leaves the bug dict
        untouched; RHBugzilla rewrites component/version into singular +
        plural key pairs, and with ``rhbz_back_compat`` enabled it further
        flattens keywords/blocks/alias into comma-joined strings, expands
        groups into dicts, and collapses flags into a summary string.
        """
        def _testPostCompare(bz, indict, outexpect):
            outdict = indict.copy()
            bz.post_translation({}, outdict)
            self.assertTrue(outdict == outexpect)

            # Make sure multiple calls don't change anything
            bz.post_translation({}, outdict)
            self.assertTrue(outdict == outexpect)

        bug3 = bugzilla.Bugzilla3(url=None, cookiefile=None)
        rhbz = bugzilla.RHBugzilla(url=None, cookiefile=None)

        # Input shaped like a raw bug dict from the XML-RPC API.
        test1 = {
            "component": ["comp1"],
            "version": ["ver1", "ver2"],

            'flags': [{
                'is_active': 1,
                'name': 'qe_test_coverage',
                'setter': '*****@*****.**',
                'status': '?',
            }, {
                'is_active': 1,
                'name': 'rhel-6.4.0',
                'setter': '*****@*****.**',
                'status': '+',
            }],

            'alias': ["FOO", "BAR"],
            'blocks': [782183, 840699, 923128],
            'keywords': ['Security'],
            'groups': ['redhat'],
        }

        # Expected without back-compat: singular + plural field pairs.
        out_simple = test1.copy()
        out_simple["components"] = out_simple["component"]
        out_simple["component"] = out_simple["components"][0]
        out_simple["versions"] = out_simple["version"]
        out_simple["version"] = out_simple["versions"][0]

        # Expected with rhbz_back_compat=True: flattened string forms.
        out_complex = out_simple.copy()
        out_complex["keywords"] = ",".join(out_complex["keywords"])
        out_complex["blocks"] = ",".join([str(b) for b in
                                          out_complex["blocks"]])
        out_complex["alias"] = ",".join(out_complex["alias"])
        out_complex["groups"] = [{'description': 'redhat',
                                  'ison': 1, 'name': 'redhat'}]
        out_complex["flags"] = "qe_test_coverage?,rhel-6.4.0+"

        _testPostCompare(bug3, test1, test1)
        _testPostCompare(rhbz, test1, out_simple)

        rhbz.rhbz_back_compat = True
        _testPostCompare(rhbz, test1, out_complex)
Example #7
0
def _get_bugzilla_bug(bug_id):
    """Fetch bug ``bug_id``.

    Results are memoized in the module-level ``_bugzilla`` cache, so each
    bug is fetched from the server at most once per process.

    :param int bug_id: The ID of a bug in the Bugzilla database.
    :return: A bug object as returned by python-bugzilla's ``getbug``,
        restricted to the id/status/whiteboard/flags fields.
    :raises BugFetchError: If an error occurs while fetching the bug. For
        example, a network timeout occurs or the bug does not exist.
    :raises BZUnauthenticatedCall: If no Bugzilla credentials are
        configured; the exception carries the fetched bug, which then lacks
        flags data.

    """
    # Is bug ``bug_id`` in the cache?
    if bug_id in _bugzilla:
        LOGGER.debug('Bugzilla bug {0} found in cache.'.format(bug_id))
    else:
        LOGGER.info('Bugzilla bug {0} not in cache. Fetching.'.format(bug_id))
        # Make a network connection to the Bugzilla server.
        bz_credentials = {}
        if setting_is_set('bugzilla'):
            bz_credentials = settings.bugzilla.get_credentials()
        try:
            bz_conn = bugzilla.RHBugzilla(url=BUGZILLA_URL, **bz_credentials)
        except (TypeError, ValueError):  # pragma: no cover
            raise BugFetchError(
                'Could not connect to {0}'.format(BUGZILLA_URL)
            )
        # Fetch the bug and place it in the cache.
        try:
            _bugzilla[bug_id] = bz_conn.getbug(
                bug_id,
                include_fields=['id', 'status', 'whiteboard', 'flags']
            )
            # The bug is cached before raising, so callers that catch
            # BZUnauthenticatedCall can still read the (flag-less) bug.
            if not bz_credentials:
                raise BZUnauthenticatedCall(
                    _bugzilla[bug_id],
                    'Unauthenticated call made to BZ API, no flags data will '
                    'be available'
                )
        except Fault as err:
            raise BugFetchError(
                'Could not fetch bug. Error: {0}'.format(err.faultString)
            )
        except ExpatError as err:
            raise BugFetchError(
                'Could not interpret bug. Error: {0}'
                .format(ErrorString(err.code))
            )

    return _bugzilla[bug_id]
Example #8
0
    def __init__(self, bugzilla_url, product, release, status, comment=False,
                 dry_run=False):
        """Store the run configuration and open a Bugzilla session.

        Exits the process (status 1) when python-bugzilla has no cached
        credentials for the given URL.
        """
        self.bugzilla_url = bugzilla_url
        self.product = product
        self.release = release
        self.status = status
        self.comment = comment
        self.dry_run = dry_run

        # The session must already be authenticated (cached credentials).
        self.bzapi = bugzilla.RHBugzilla(self.bugzilla_url)
        if not self.bzapi.logged_in:
            logging.error('The application requires bugzilla credentials.')
            sys.exit(1)
Example #9
0
def get_rfes_from_bugzilla(bugs):
    """Fetch the given RFE bugs from Bugzilla.

    :param bugs: bug ID (or comma-separated IDs) to query for.
    :return: tuple of (list of bug objects, live bugzilla connection).
    """
    # Credentials come from the local config file.
    user_params = parse_config()
    username = user_params.get("user") + "@redhat.com"
    password = user_params.get("password")

    # Connect to Bugzilla and authenticate.
    bz_connection = bugzilla.RHBugzilla(url=BUGZILLA_SERVER)
    bz_connection.login(username, password)
    # FIX: `print` as a statement is Python-2 only; the call form works on
    # both Python 2 and 3.
    print("Bugzilla connection: " + str(bz_connection.logged_in))

    # Build and run the RFE query.
    query = bz_connection.build_query(bug_id=bugs)
    bz_rfes = bz_connection.query(query)
    return bz_rfes, bz_connection
def get_rfes_from_bugzilla():
    """Fetch RFE bugs from the saved Bugzilla search QE_Test_Coverage_RHOS_7.

    :return: tuple of (list of bug objects, live bugzilla connection).
    """
    # Credentials come from the local config file.
    user_params = parse_config()
    username = user_params.get("user") + "@redhat.com"
    password = user_params.get("password")

    bz_connection = bugzilla.RHBugzilla(url=BUGZILLA_SERVER)
    bz_connection.login(username, password)
    # FIX: `print` as a statement is Python-2 only; the call form works on
    # both Python 2 and 3.
    print("Bugzilla connection: " + str(bz_connection.logged_in))

    query = bz_connection.build_query(
        savedsearch="QE_Test_Coverage_RHOS_7"
    )
    bz_rfes = bz_connection.query(query)

    return bz_rfes, bz_connection
Example #11
0
def bzbug(bz_id):
    """Decorator factory that skips the test if Bugzilla bug *bz_id* is open.

    The bug is looked up in the module-level ``_bugzilla`` cache first; on a
    cache miss it is fetched from ``BUGZILLA_URL``, retrying up to three
    times on XML parse errors.  Tests are also skipped when the bug cannot
    be fetched at all (server fault or repeated failures); only a failure to
    even set up the connection leaves the test enabled.
    """

    if bz_id not in _bugzilla:
        try:
            mybz = bugzilla.RHBugzilla()
            mybz.connect(BUGZILLA_URL)
        except (TypeError, ValueError):
            # Connection setup failed: leave the test enabled (no-op decorator).
            logging.warning("Invalid Bugzilla ID {0}".format(bz_id))
            return lambda func: func

        attempts = 0
        mybug = None
        while attempts < 3 and mybug is None:
            try:
                mybug = mybz.getbugsimple(bz_id)
                _bugzilla[bz_id] = mybug
            except ExpatError:
                # Malformed XML response -- retry (at most three attempts).
                attempts += 1
            except Fault as error:
                # Server-side fault (e.g. bug not found): skip the test.
                return unittest.skip(
                    "Test skipped: %s" % error.faultString
                )

        if mybug is None:
            return unittest.skip(
                "Test skipped due to not being able to fetch bug #%s info" %
                bz_id)
    else:
        mybug = _bugzilla[bz_id]

    if (mybug.status == 'NEW') or (mybug.status == 'ASSIGNED'):
        logging.debug(mybug)
        return unittest.skip("Test skipped due to %s" % mybug)
    else:
        return lambda func: func
def get_rfes_from_bugzilla():
    """Fetch a single pinned RFE bug (quicksearch 1241596) from Bugzilla.

    :return: tuple of (list of bug objects, live bugzilla connection).
    """
    # Credentials come from the local config file.
    user_params = parse_config()
    username = user_params.get("user") + "@redhat.com"
    password = user_params.get("password")

    bz_connection = bugzilla.RHBugzilla(url=BUGZILLA_SERVER)
    bz_connection.login(username, password)
    # FIX: `print` as a statement is Python-2 only; the call form works on
    # both Python 2 and 3.
    print("Bugzilla connection: " + str(bz_connection.logged_in))

    # Pin a single bug via quicksearch.  (The previous ad-hoc buglist.cgi
    # query URLs that used to be pasted here as comments were removed.)
    query = bz_connection.build_query(quicksearch="1241596")
    # exclude: 1475556, 1288035 , 1375207, 1467591, 1512686, 1524393, 1524402

    bz_rfes = bz_connection.query(query)

    return bz_rfes, bz_connection
Example #13
0
    def testUserAgent(self):
        """Each Bugzilla class must advertise its own user-agent suffix."""
        plain_client = bugzilla.Bugzilla3(url=None, cookiefile=None)
        redhat_client = bugzilla.RHBugzilla(url=None, cookiefile=None)

        self.assertTrue(plain_client.user_agent.endswith("Bugzilla3/0.1"))
        self.assertTrue(redhat_client.user_agent.endswith("RHBugzilla/0.1"))
Example #14
0
 def _get_connection(self):
     """Return the cached RHBugzilla connection, creating it on first use."""
     if self._connection:
         return self._connection
     self._connection = bugzilla.RHBugzilla(url=BUGZILLA_URL, **self.credentials)
     return self._connection
Example #15
0
def main():
    """Report image-validation results to Bugzilla (or stdout with --test).

    For each AMI in the result file: create a bug, attach the per-AMI YAML,
    add the validation comments and set the bug status according to the
    overall result.  In --test mode the report is printed instead.
    """

    argparser = argparse.ArgumentParser(description='Report validation result to bugzilla')
    argparser.add_argument('--bugzilla-component',
                           default="images", help='use specified bugzilla component')
    argparser.add_argument('--bugzilla-product',
                           default="Cloud Image Validation", help='use specified bugzilla product')
    argparser.add_argument('--bugzilla-url',
                           default="https://bugzilla.redhat.com/xmlrpc.cgi", help='use specified bugzilla xmlrpc url')
    argparser.add_argument('--config',
                           default="/etc/validation.yaml", help='use supplied yaml config file')
    argparser.add_argument('--debug', action='store_const', const=True,
                           default=False, help='debug mode')
    argparser.add_argument('--result', help='yaml file with validation result', required=True)
    argparser.add_argument('--test', action='store_const', const=True,
                           default=False, help='report to stdout instead of bugzilla')
    argparser.add_argument('-y', '--yaml-summary', help='provide info in yaml',
                           action='store_const', default=False, const=True)
    argparser.add_argument('-a', '--all-commands', help='show all commands in bugzillas not just failed',
                           action='store_true')

    args = argparser.parse_args()

    # Prefer the C-accelerated YAML loader when available.
    try:
        from yaml import CLoader as Loader
    except ImportError:
        from yaml import Loader
    # FIX: use context managers so the files are closed on all paths.
    with open(args.result, 'r') as resultd:
        result = yaml.load(resultd, Loader=Loader)

    summary = Summary(yaml_summary=args.yaml_summary)

    if not args.test:
        # Bugzilla credentials are only needed when actually reporting.
        with open(args.config, 'r') as confd:
            # FIX: pass an explicit Loader instead of the deprecated/unsafe
            # bare yaml.load().
            yamlconfig = yaml.load(confd, Loader=Loader)

        bugzilla_user = yamlconfig["bugzilla"]["user"]
        bugzilla_password = yamlconfig["bugzilla"]["password"]
        bzid = bugzilla.RHBugzilla(url=args.bugzilla_url, user=bugzilla_user, password=bugzilla_password)
        if not bzid:
            print("Failed to connect to bugzilla!")
            sys.exit(1)

    for ami in result:
        # Per-AMI result YAML, attached to the bug below.
        ami_fd = tempfile.NamedTemporaryFile()
        ami_fd.write(yaml.safe_dump(ami))
        ami_fd.seek(0)
        overall_result, bug_summary, info, bug_description = valid_result.get_overall_result(ami, verbose=args.all_commands)
        bugnr = None

        if not args.test:
            bzobject = bzid.createbug(product=args.bugzilla_product,
                                      component=args.bugzilla_component,
                                      version="RHEL" + ami["version"],
                                      rep_platform=ami["arch"],
                                      summary=bug_summary,
                                      op_sys="Linux",
                                      keywords=["TestOnly"])
            if not bzobject:
                print("Failed to create bug in bugzilla!")
                sys.exit(1)

            bugid = str(bzobject.bug_id)
            attach_name = ami["ami"] + ".yaml"
            bzid.attachfile(bugid, ami_fd, attach_name, filename=attach_name, contenttype="text/yaml", ispatch=False)
            # FIXME: check previous call result
            bug = None
            # FIX: range instead of py2-only xrange; narrow the previously
            # bare `except:` so KeyboardInterrupt/SystemExit still propagate.
            for _ntry in range(10):
                try:
                    bug = bzid.getbug(bugid)
                    break
                except Exception:
                    # Bug may not be visible right after creation; retry.
                    time.sleep(10)
            if bug:
                bugnr = bug.id
                for comment in bug_description:
                    bug.addcomment(comment)
                bug.setstatus("VERIFIED" if overall_result == "succeeded" else "ON_QA")
        else:
            print(info)
            print('\n'.join(bug_description))

        summary.add(ami['ami'], bug=bugnr, status='pass' if overall_result == "succeeded" else 'fail')
        ami_fd.close()
    print(summary)
Example #16
0
    def run(self):
        """Generate the py3query JSON report for the configured repos.

        Classifies binary packages by the Python major version(s) they
        depend on, maps them to their SRPMs, records dependency and
        reverse-dependency relations, flags unversioned ``python-`` style
        requirements, optionally merges Bugzilla tracker data, and dumps
        everything as JSON (plus an optional repo->SRPM groups file).
        """
        reponames = self.opts.py3query_repo
        if not reponames:
            reponames = ['rawhide']
        self.base_query = self.base.sack.query()
        self.pkg_query = self.base_query.filter(reponame=list(reponames))
        source_reponames = [n + '-source' for n in reponames]
        self.src_query = self.base_query.filter(reponame=source_reponames).filter(arch=['src'])

        # python_versions: {package: set of Python versions}
        python_versions = collections.defaultdict(set)
        # rpm_pydeps: {package: set of dep names}
        rpm_pydeps = collections.defaultdict(set)
        # dep_versions: {dep name: Python version}
        # NOTE(review): declared as defaultdict(set) but only ever assigned
        # scalar versions below -- the set default factory appears unused.
        dep_versions = collections.defaultdict(set)
        for n, seeds in SEED_PACKAGES.items():
            provides = sorted(self.all_provides(reponames, seeds), key=str)

            # This effectively includes packages that still need
            # Python 3.4 while Rawhide only provides Python 3.5
            provides += sorted(seeds)

            for dep in progressbar(provides, 'Getting py{} requires'.format(n)):
                dep_versions[str(dep)] = n
                for pkg in self.whatrequires(dep, self.pkg_query):
                    python_versions[pkg].add(n)
                    rpm_pydeps[pkg].add(str(dep))

        # Add packages with 'python?' as a component of their name, if they
        # haven't been added as dependencies
        for name, version in {
            'python': 0,
            'python2': 2,
            'python3': 3,
        }.items():
            for pattern in '{}-*', '*-{}', '*-{}-*':
                name_glob = pattern.format(name)
                query = self.pkg_query.filter(name__glob=name_glob)
                message = 'Getting {} packages'.format(name_glob)
                for pkg in progressbar(query, message):
                    if pkg.sourcerpm.startswith('mingw-'):
                        # Ignore mingw packages
                        continue
                    if pkg not in python_versions:
                        python_versions[pkg].add(version)

        # srpm_names: {package: srpm name}
        # by_srpm_name: {srpm name: set of packages}
        srpm_names = {}
        by_srpm_name = collections.defaultdict(set)
        # repo_srpms: {repo name: set of srpm names}
        repo_srpms = {}
        for pkg in progressbar(python_versions.keys(), 'Getting SRPMs'):
            srpm_name = get_srpm_name(pkg)
            srpm_names[pkg] = srpm_name
            by_srpm_name[srpm_name].add(pkg)
            repo_srpms.setdefault(pkg.reponame, set()).add(srpm_name)

        # deps_of_pkg: {package: set of packages}
        deps_of_pkg = collections.defaultdict(set)
        # build_deps_of_srpm: {srpm: set of packages}
        build_deps_of_srpm = collections.defaultdict(set)
        # requirers_of_pkg: {package: set of srpm}
        requirers_of_pkg = collections.defaultdict(set)
        # build_requirers_of_pkg: {pkg: set of srpm}
        build_requirers_of_pkg = collections.defaultdict(set)
        # all_provides: {provide_name: package}
        all_provides = {str(r).split()[0]: p for p in python_versions for r in p.provides
                        if not str(r).startswith(PROVIDES_BLACKLIST)}
        for pkg in progressbar(sorted(python_versions.keys()), 'Getting requirements'):
            reqs = set()
            build_reqs = set()
            for provide in pkg.provides:
                reqs.update(self.whatrequires(provide, self.pkg_query))
                build_reqs.update(self.whatrequires(provide, self.src_query))

            for req in reqs:
                if req in python_versions.keys():
                    deps_of_pkg[req].add(pkg)
                # Both Python and non-Python packages here.
                requirers_of_pkg[pkg].add(req)

            for req in build_reqs:
                if req.name in by_srpm_name.keys():
                    build_deps_of_srpm[req.name].add(pkg)
                # Both Python and non-Python packages here.
                build_requirers_of_pkg[pkg].add(req)

        # unversioned_requirers: {srpm_name: set of srpm_names}
        unversioned_requirers = collections.defaultdict(set)
        for pkg in progressbar(set.union(*requirers_of_pkg.values(), *build_requirers_of_pkg.values()),
                               'Processing packages with ambiguous dependencies'):
            # Ignore packages that are:
            if (python_versions.get(pkg) == {3} or  # Python 3 only
                    pkg.name.endswith('-doc')):  # Documentation
                continue
            for require in (pkg.requires + pkg.requires_pre + pkg.recommends +
                            pkg.suggests + pkg.supplements + pkg.enhances):
                require = str(require).split()[0]
                requirement = all_provides.get(require)
                if (is_unversioned(require) and requirement and not
                        (require.endswith('-doc') or python_versions.get(requirement) == {3})
                        and require != 'python-unversioned-command'):
                    requirement_srpm_name = get_srpm_name(requirement)
                    requirer_srpm_name = get_srpm_name(pkg)
                    unversioned_requirers[requirement_srpm_name].add(requirer_srpm_name)

        # deps_of_pkg: {srpm name: info}
        json_output = dict()
        for name in progressbar(by_srpm_name, 'Generating output'):
            pkgs = sorted(by_srpm_name[name])
            r = json_output[name] = {}
            r['rpms'] = {
                format_rpm_name(p): {
                    'py_deps': {str(d): dep_versions[d] for d in rpm_pydeps[p]},
                    'non_python_requirers': {
                        'build_time': sorted(get_srpm_names(build_requirers_of_pkg[p]) - by_srpm_name.keys()),
                        'run_time': sorted(get_srpm_names(requirers_of_pkg[p]) - by_srpm_name.keys()),
                    },
                    'almost_leaf': (
                        # is Python 2 and is not required by anything EXCEPT
                        # sibling subpackages
                        2 in python_versions[p] and
                        not get_srpm_names(build_requirers_of_pkg[p] | requirers_of_pkg[p]) - {name}
                    ),
                    'legacy_leaf': (
                        # is Python 2 and is not required by anything
                        2 in python_versions[p] and
                        not get_srpm_names(build_requirers_of_pkg[p] | requirers_of_pkg[p])
                    ),
                } for p in pkgs}
            set_status(r, pkgs, python_versions)

            r['deps'] = sorted(set(srpm_names[d]
                                   for p in pkgs
                                   for d in deps_of_pkg.get(p, '')
                                   if srpm_names[d] != name))
            r['build_deps'] = sorted(set(srpm_names[d]
                                         for d in build_deps_of_srpm.get(name, '')
                                         if srpm_names[d] != name))
            if unversioned_requirers.get(name):
                r['unversioned_requirers'] = sorted(unversioned_requirers[name])

        # add Bugzilla links
        if self.opts.fetch_bugzilla:
            bar = iter(progressbar(['connecting', 'tracker', 'individual'],
                                   'Getting bugs'))

            next(bar)
            bz = bugzilla.RHBugzilla(BUGZILLA_URL)

            next(bar)
            include_fields = ['id', 'depends_on', 'blocks', 'component',
                              'status', 'resolution', 'last_change_time']
            trackers = bz.getbugs(TRACKER_BUG_IDS,
                                  include_fields=include_fields)
            all_ids = [b for t in trackers for b in t.depends_on]

            next(bar)
            bugs = bz.getbugs(all_ids, include_fields=include_fields)
            bar.close()

            def bug_namegetter(bug):
                return '{bug.id} {bug.status} {bug.component}'.format(bug=bug)

            rank = ['NEW', 'ASSIGNED', 'POST', 'MODIFIED', 'ON_QA', 'VERIFIED',
                    'RELEASE_PENDING', 'CLOSED']

            def key(bug):
                return rank.index(bug.status), bug.last_change_time

            # Bugs are merged in rank order; the first bug seen for a
            # component wins (the `continue` below skips later ones).
            bugs = sorted(bugs, key=key)

            for bug in progressbar(bugs, 'Merging bugs',
                                   namegetter=bug_namegetter):
                r = json_output.get(bug.component, {})
                if 'bug' in r.get('links', {}):
                    continue
                status = bug.status
                if bug.resolution:
                    status += ' ' + bug.resolution
                # Let's get the datetime of the last comment and convert to string
                last_change_datetime = time.strftime(
                    '%Y-%m-%d %H:%M:%S',
                    bug.last_change_time.timetuple())
                r.setdefault('links', {})['bug'] = [
                    bug.weburl, status, last_change_datetime]

                for tb in bug.blocks:
                    if tb in ADDITIONAL_TRACKER_BUGS:
                        r.setdefault('tracking_bugs', []).append(
                            BUGZILLA_BUG_URL.format(tb))

                if (any(tb in bug.blocks for tb in MISPACKAGED_TRACKER_BUG_IDS) and
                        r.get('status') == 'idle' and
                        status not in NOTABUG_STATUSES):
                    r['status'] = "mispackaged"
                    r['note'] = ('There is a problem in Fedora packaging, '
                                 'not necessarily with the software itself. '
                                 'See the linked Fedora bug.')

        # Print out output

        if self.opts.output:
            with open(self.opts.output, 'w') as f:
                json.dump(json_output, f, indent=2, sort_keys=True)
        else:
            json.dump(json_output, sys.stdout, indent=2, sort_keys=True)
            sys.stdout.flush()

        # Write out a groups.json
        if self.opts.repo_groups_file:
            output = {repo_name: {'name': repo_name,
                                  'packages': sorted(srpm_names)}
                      for repo_name, srpm_names in repo_srpms.items()}
            with open(self.opts.repo_groups_file, 'w') as f:
                json.dump(output, f, indent=2, sort_keys=True)
        data['months'][-1]['month'] += (" (%d)" % curcount)

    write_html(loader, 'bymonth.html', data, tmpdir, 'NEW.html')

    return data['count']


if __name__ == '__main__':
    options = parse_commandline()
    verbose = options.verbose
    config = parse_config(options.configfile)
    if config['maxpackages']:
        maxpackages = int(config['maxpackages'])
    dbprint("Limiting to {0} packages".format(maxpackages))
    bz = bugzilla.RHBugzilla(url=config['url'],
                             cookiefile=None,
                             user=config['username'],
                             password=config['password'])
    t = time.time()
    (bugs, bugdata, usermap) = run_query(bz)
    querytime = time.time() - t

    # Don't bother running this stuff until the query completes, since it fails
    # so often.
    loader = TemplateLoader(options.templdir)
    tmpdir = tempfile.mkdtemp(dir=options.dirname)

    # The initial set of substitutions that's shared between the report functions
    subs = {
        'update': datetime.datetime.now().strftime('%Y-%m-%d %H:%M'),
        'querytime': querytime,
        'version': VERSION,
Example #18
0
#!/usr/bin/env python3

from __future__ import print_function
import bugzilla
import os
import sys


# Read the API key from ~/.bugzilla_apikey when present; otherwise fall
# back to whatever python-bugzilla finds in its own config.
api_key = None
try:
    api_key = open(os.path.expanduser('~/.bugzilla_apikey')).read().strip()
except FileNotFoundError:
    #  api key in ~/.config/python-bugzilla/bugzillarc will be used
    pass

# url=None defers the connection until connect() is called below.
bz = bugzilla.RHBugzilla(url=None, api_key=api_key)
bz.connect('https://bugzilla.redhat.com')

PRODUCT_NAME = "OpenShift Container Platform"
try:
    TARGET_RELEASE = sys.argv[1]  # e.g. "4.5.0"
except IndexError:
    sys.exit("Target Release required, e.g. '4.5.0'")

# Security-tracking bugs already fixed (MODIFIED or later) for the release.
query = bz.build_query(
    product=PRODUCT_NAME,
    status=["MODIFIED", "ON_QA", "VERIFIED"],
    target_release=TARGET_RELEASE,
    keywords=["Security", "SecurityTracking"],
)
Example #19
0
import pytest

import bugzilla
from bugzilla import Bugzilla
import tests


# Use REDHAT_URL from the test CLI config when set; default to the partner
# (non-production) Bugzilla instance.
RHURL = tests.CLICONFIG.REDHAT_URL or "partner-bugzilla.redhat.com"


def _split_int(s):
    return [int(i) for i in s.split(",")]


# Abort the whole test module unless python-bugzilla already holds cached
# login credentials for RHURL -- the read/write tests below need them.
# NOTE(review): neither `sys` nor `unittest` (used further down) appears in
# the visible import block; confirm they are imported elsewhere in the file.
if not bugzilla.RHBugzilla(url=RHURL).logged_in:
    print("R/W tests require cached login credentials for url=%s" % RHURL)
    sys.exit(1)


class RHPartnerTest(unittest.TestCase):
    # Despite its name, this instance is simply for bugzilla testing,
    # doesn't send out emails and is blown away occasionally. The front
    # page has some info.
    url = RHURL
    bzclass = bugzilla.RHBugzilla

    def _check_have_admin(self, bz, funcname):
        # groupnames is empty for any user if our logged in user does not
        # have admin privs.
        # Check a known account that likely won't ever go away
Example #20
0
    def run(self, args):
        """Query repo ``self.opts.py3query_repo`` for Python packages,
        classify each by the Python version(s) it requires, compute
        SRPM-level dependency relations, optionally merge Bugzilla tracker
        status, and write the result as JSON to ``self.opts.output`` or
        stdout.
        """
        if self.opts.help_cmd:
            print(self.parser.format_help())
            return

        reponame = self.opts.py3query_repo
        self.base_query = self.base.sack.query()
        self.pkg_query = self.base_query.filter(reponame=reponame)
        self.src_query = self.base_query.filter(reponame=reponame +
                                                '-source').filter(arch=['src'])

        # python_versions: {package: set of Python versions}
        python_versions = collections.defaultdict(set)
        # rpm_pydeps: {package: set of dep names}
        rpm_pydeps = collections.defaultdict(set)
        # dep_versions: {dep name: Python version}
        dep_versions = collections.defaultdict(set)
        for n, seeds in SEED_PACKAGES.items():
            provides = sorted(self.all_provides(reponame, seeds), key=str)

            # This effectively includes packages that still need
            # Python 3.4 while Rawhide only provides Python 3.5
            provides += sorted(seeds)

            for dep in progressbar(provides,
                                   'Getting py{} requires'.format(n)):
                dep_versions[str(dep)] = n
                for pkg in self.whatrequires(dep):
                    python_versions[pkg].add(n)
                    rpm_pydeps[pkg].add(str(dep))

        # srpm_names: {package: srpm name}
        # by_srpm_name: {srpm name: set of packages}
        srpm_names = {}
        by_srpm_name = collections.defaultdict(set)
        for pkg in progressbar(python_versions.keys(), 'Getting SRPMs'):
            srpm_name = hawkey.split_nevra(pkg.sourcerpm).name
            srpm_names[pkg] = srpm_name
            by_srpm_name[srpm_name].add(pkg)

        # deps_of_pkg: {package: set of packages}
        # (A dead `all_provides` dict comprehension used to be built here;
        # it was never read anywhere in this method, so it was removed.)
        deps_of_pkg = collections.defaultdict(set)
        for pkg in progressbar(sorted(python_versions.keys()),
                               'Getting requirements'):
            reqs = set()
            for provide in pkg.provides:
                reqs.update(self.whatrequires(provide))
            for req in reqs:
                if req in python_versions.keys():
                    deps_of_pkg[req].add(pkg)

        # json_output: {srpm name: info}
        json_output = dict()
        for name in progressbar(by_srpm_name, 'Generating output'):
            pkgs = sorted(by_srpm_name[name])
            r = json_output[name] = {}
            set_status(r, pkgs, python_versions)
            # Per-RPM map of Python dependency name -> Python version.
            r['rpms'] = {
                format_rpm_name(p):
                {str(d): dep_versions[d]
                 for d in rpm_pydeps[p]}
                for p in pkgs
            }
            # SRPM-level runtime deps, excluding self-dependencies.
            r['deps'] = sorted(
                set(srpm_names[d] for p in pkgs
                    for d in deps_of_pkg.get(p, '') if srpm_names[d] != name))

        # add Bugzilla links
        if self.opts.fetch_bugzilla:
            bar = iter(
                progressbar(['connecting', 'tracker', 'individual'],
                            'Getting bugs'))

            next(bar)
            bz = bugzilla.RHBugzilla(BUGZILLA_URL)

            next(bar)
            include_fields = [
                'id', 'depends_on', 'blocks', 'component', 'status',
                'resolution'
            ]
            trackers = bz.getbugs(TRACKER_BUG_IDS,
                                  include_fields=include_fields)
            all_ids = [b for t in trackers for b in t.depends_on]

            next(bar)
            bugs = bz.getbugs(all_ids, include_fields=include_fields)
            bar.close()

            def bug_namegetter(bug):
                # One-line progress label for a bug.
                return '{bug.id} {bug.status} {bug.component}'.format(bug=bug)

            for bug in progressbar(bugs,
                                   'Merging bugs',
                                   namegetter=bug_namegetter):
                # NOTE(review): if bug.component is not already a key in
                # json_output, this fresh dict is discarded and the bug is
                # effectively ignored — confirm that is intentional.
                r = json_output.get(bug.component, {})
                # (Removed unused locals `url`, `inprogress_statuses` and
                # `inprogress_resolutions` — computed but never read.)
                status = bug.status
                if bug.resolution:
                    status += ' ' + bug.resolution
                r.setdefault('links', {})['bug'] = [bug.weburl, status]

                if r.get('status') == 'idle' and bug.status != 'NEW':
                    r['status'] = 'in-progress'
                elif r.get('status') == 'idle' and bug.status == 'NEW' and \
                        any(tb in bug.blocks for tb in MISPACKAGED_TRACKER_BUG_IDS):
                    r['status'] = "mispackaged"
                    r['note'] = ('There is a problem in Fedora packaging, ' +
                                 'not necessarily with the software itself. ' +
                                 'See the linked Fedora bug.')

        # Print out output

        if self.opts.output:
            with open(self.opts.output, 'w') as f:
                json.dump(json_output, f, indent=2, sort_keys=True)
        else:
            json.dump(json_output, sys.stdout, indent=2, sort_keys=True)
            sys.stdout.flush()
Example #21
0
    def run(self):
        """Scan the loaded repos for Python packages, classify each by the
        Python version(s) it depends on, compute runtime/build dependency
        relations between their SRPMs, optionally merge in Bugzilla tracker
        data, and write the result as JSON to ``self.opts.output`` or
        stdout (plus an optional repo-groups file).
        """
        self.base_query = self.base.sack.query()
        self.pkg_query = self.base_query.filter(arch__neq=['src'])
        self.src_query = self.base_query.filter(arch=['src'])

        # python_versions: {package: set of Python versions}
        python_versions = collections.defaultdict(set)
        # rpm_pydeps: {package: set of dep names}
        rpm_pydeps = collections.defaultdict(set)
        # dep_versions: {dep name: Python version}
        dep_versions = collections.defaultdict(set)
        for n, seeds in SEED_PACKAGES.items():
            provides = sorted(self.all_provides(seeds), key=str)

            # This effectively includes packages that still need
            # Python 3.4 while Rawhide only provides Python 3.5
            provides += sorted(seeds)

            for dep in progressbar(provides,
                                   'Getting py{} requires'.format(n)):
                dep_versions[str(dep)] = n
                for pkg in self.whatrequires(dep, self.base_query):
                    python_versions[pkg].add(n)
                    rpm_pydeps[pkg].add(str(dep))

        # Add packages with 'python?' as a component of their name, if they
        # haven't been added as dependencies
        for name, version in {
                'python': 0,
                'python2': 2,
                'python3': 3,
        }.items():
            for pattern in '{}-*', '*-{}', '*-{}-*':
                name_glob = pattern.format(name)
                query = self.base_query.filter(name__glob=name_glob)
                message = 'Getting {} packages'.format(name_glob)
                for pkg in progressbar(query, message):
                    if pkg.sourcerpm and pkg.sourcerpm.startswith('mingw-'):
                        # Ignore mingw packages
                        continue
                    if pkg not in python_versions:
                        python_versions[pkg].add(version)

        # add python27 package manually, it doesn't require Python 2, but it is
        query = self.pkg_query.filter(name='python27')
        for pkg in query:
            python_versions[pkg].add(2)

        # srpm_names: {package: srpm name}
        # by_srpm_name: {srpm name: set of packages}
        srpm_names = {}
        by_srpm_name = collections.defaultdict(set)
        # repo_srpms: {repo name: set of srpm names}
        repo_srpms = {}
        for pkg in progressbar(python_versions.keys(), 'Getting SRPMs'):
            srpm_name = get_srpm_name(pkg)
            srpm_names[pkg] = srpm_name
            by_srpm_name[srpm_name].add(pkg)
            repo_srpms.setdefault(pkg.reponame, set()).add(srpm_name)

        # old_misnamed: nested mapping loaded from a YAML file given via
        # --py3query-misnamed; old_misnamed_flat inverts the inner mappings
        # (inner value -> inner key), used below to translate provide names.
        old_misnamed = {}
        old_misnamed_flat = {}
        if self.opts.py3query_misnamed:
            with open(self.opts.py3query_misnamed) as f:
                old_misnamed = yaml.safe_load(f)
            old_misnamed_flat = {
                pk: pr
                for pkg in old_misnamed
                for pr, pk in old_misnamed[pkg].items()
            }

        # deps_of_pkg: {package: set of packages}
        deps_of_pkg = collections.defaultdict(set)
        # build_deps_of_srpm: {srpm: set of packages}
        build_deps_of_srpm = collections.defaultdict(set)
        # requirers_of_pkg: {package: set of srpm}
        requirers_of_pkg = collections.defaultdict(set)
        # build_requirers_of_pkg: {pkg: set of srpm}
        build_requirers_of_pkg = collections.defaultdict(set)
        # all_provides: {provide_name: package}
        all_provides = {
            str(r).split()[0]: p
            for p in python_versions for r in p.provides
            if not str(r).startswith(PROVIDES_BLACKLIST)
        }
        for pkg in progressbar(sorted(python_versions.keys()),
                               'Getting requirements'):
            # Python-3-only packages need no py2 dependency tracking.
            if python_versions[pkg] == {3}:
                continue
            if pkg.name in NAME_NOTS:
                # "NAME_NOTS" are Python-version-agnostic packages,
                # such as wheels, RPM macros and documentation.
                # Don't track those as python2 dependencies.
                continue
            reqs = set()
            build_reqs = set()
            provides = set(pkg.provides)
            # Also consider historical (misnamed) provide names for this pkg.
            for provide in pkg.provides:
                str_provide = str(provide).split(' ')[0]
                if str_provide in old_misnamed_flat:
                    provides.add(old_misnamed_flat[str_provide])

            for provide in provides:
                reqs.update(self.whatrequires(provide, self.pkg_query))
                build_reqs.update(self.whatrequires(provide, self.src_query))

            for req in reqs:
                if req in python_versions.keys():
                    deps_of_pkg[req].add(pkg)
                # Both Python and non-Python packages here.
                requirers_of_pkg[pkg].add(req)

            for req in build_reqs:
                if req.name in by_srpm_name.keys():
                    build_deps_of_srpm[req.name].add(pkg)
                # Both Python and non-Python packages here.
                build_requirers_of_pkg[pkg].add(req)

        # unversioned_requirers: {srpm_name: set of srpm_names}
        unversioned_requirers = collections.defaultdict(set)
        for pkg in progressbar(
                set.union(*requirers_of_pkg.values(),
                          *build_requirers_of_pkg.values()),
                'Processing packages with ambiguous dependencies'):
            # Ignore packages that are:
            if (python_versions.get(pkg) == {3} or  # Python 3 only
                    pkg.name.endswith('-doc')):  # Documentation
                continue
            for require in (pkg.requires + pkg.requires_pre + pkg.recommends +
                            pkg.suggests + pkg.supplements + pkg.enhances):
                require = str(require).split()[0]

                # Translate through the misnamed-provides map when possible;
                # keep the literal name for the filtering checks below.
                real_require = require
                try:
                    require = old_misnamed[pkg.name][real_require]
                except KeyError:
                    pass

                requirement = all_provides.get(require)

                if (is_unversioned(real_require) and requirement
                        and not (real_require.endswith('-doc')
                                 or python_versions.get(requirement) == {3})
                        and real_require not in NAME_NOTS
                        and real_require != 'python-unversioned-command'):
                    requirement_srpm_name = get_srpm_name(requirement)
                    requirer_srpm_name = get_srpm_name(pkg)
                    unversioned_requirers[requirement_srpm_name].add(
                        requirer_srpm_name)

        # json_output: {srpm name: info}
        json_output = dict()
        for name in progressbar(by_srpm_name, 'Generating output'):
            pkgs = sorted(by_srpm_name[name])
            r = json_output[name] = {}
            r['rpms'] = {
                format_rpm_name(p): {
                    'py_deps':
                    {str(d): dep_versions[d]
                     for d in rpm_pydeps[p]},
                    'non_python_requirers': {
                        'build_time':
                        sorted(
                            get_srpm_names(build_requirers_of_pkg[p]) -
                            by_srpm_name.keys()),
                        'run_time':
                        sorted(
                            get_srpm_names(requirers_of_pkg[p]) -
                            by_srpm_name.keys()),
                    },
                    'almost_leaf': (
                        # not SRPM and is Python 2 and is not required by anything EXCEPT
                        # sibling subpackages
                        p.arch != 'src' and 2 in python_versions[p] and
                        not get_srpm_names(build_requirers_of_pkg[p]
                                           | requirers_of_pkg[p]) - {name}),
                    'legacy_leaf': (
                        # not SRPM and is Python 2 and is not required by anything
                        p.arch != 'src' and 2 in python_versions[p]
                        and not get_srpm_names(build_requirers_of_pkg[p]
                                               | requirers_of_pkg[p])),
                    'arch':
                    p.arch,
                }
                for p in pkgs
            }
            set_status(r, pkgs, python_versions)

            # SRPM-level deps, excluding self-references.
            r['deps'] = sorted(
                set(srpm_names[d] for p in pkgs
                    for d in deps_of_pkg.get(p, '') if srpm_names[d] != name))
            r['build_deps'] = sorted(
                set(srpm_names[d] for d in build_deps_of_srpm.get(name, '')
                    if srpm_names[d] != name))
            if unversioned_requirers.get(name):
                r['unversioned_requirers'] = sorted(
                    unversioned_requirers[name])

        # add Bugzilla links
        if self.opts.fetch_bugzilla:
            bar = iter(
                progressbar(['connecting', 'tracker', 'individual'],
                            'Getting bugs'))

            next(bar)
            bz = bugzilla.RHBugzilla(BUGZILLA_URL)

            next(bar)
            include_fields = [
                'id', 'depends_on', 'blocks', 'component', 'status',
                'resolution', 'last_change_time', 'short_desc'
            ]
            trackers = bz.getbugs(TRACKER_BUGS, include_fields=include_fields)
            all_ids = set(b for t in trackers for b in t.depends_on)

            next(bar)
            bugs = bz.getbugs(all_ids, include_fields=include_fields)
            bar.close()

            def bug_namegetter(bug):
                # One-line progress label for a bug.
                return '{bug.id} {bug.status} {bug.component}'.format(bug=bug)

            # Sort bugs by workflow stage, then by last change time.
            rank = [
                'NEW', 'ASSIGNED', 'POST', 'MODIFIED', 'ON_QA', 'VERIFIED',
                'RELEASE_PENDING', 'CLOSED'
            ]

            def key(bug):
                return rank.index(bug.status), bug.last_change_time

            bugs = sorted(bugs, key=key)

            for bug in progressbar(bugs,
                                   'Merging bugs',
                                   namegetter=bug_namegetter):
                # NOTE(review): if bug.component is not a key in json_output,
                # this fresh dict (and the entry built below) is discarded.
                r = json_output.get(bug.component, {})
                # NOTE(review): rebinds `bugs` (previously the sorted bug
                # list) to this component's per-bug dict; iteration is
                # unaffected because progressbar already holds the list, but
                # the shadowing is easy to misread.
                bugs = r.setdefault('bugs', {})
                entry = bugs.get(bug.id)
                if not entry:
                    entry = {
                        'url':
                        bug.weburl,
                        'short_desc':
                        bug.short_desc,
                        'status':
                        bug.status,
                        'resolution':
                        bug.resolution,
                        'last_change':
                        time.strftime('%Y-%m-%d %H:%M:%S',
                                      bug.last_change_time.timetuple()),
                        'trackers': [],
                    }
                    # Record which tracker bugs (by alias) this bug blocks.
                    for tb in bug.blocks:
                        alias = TRACKER_BUGS.get(tb)
                        if alias:
                            entry['trackers'].append(alias)
                    bugs[bug.id] = entry

        # Print out output

        if self.opts.output:
            with open(self.opts.output, 'w') as f:
                json.dump(json_output, f, indent=2, sort_keys=True)
        else:
            json.dump(json_output, sys.stdout, indent=2, sort_keys=True)
            sys.stdout.flush()

        # Write out a groups.json
        if self.opts.repo_groups_file:
            output = {
                repo_name: {
                    'name': repo_name,
                    'packages': sorted(srpm_names)
                }
                for repo_name, srpm_names in repo_srpms.items()
            }
            with open(self.opts.repo_groups_file, 'w') as f:
                json.dump(output, f, indent=2, sort_keys=True)