def _resolve_modules(self, compose_source):
        koji_session = get_koji_session(self.workflow, fallback=NO_FALLBACK)

        resolved_modules = {}
        for module in compose_source.strip().split():
            module_spec = ModuleSpec.from_str(module)
            build, rpm_list = get_koji_module_build(koji_session, module_spec)

            # The returned RPM list contains source RPMs and RPMs for all
            # architectures.
            rpms = ['{name}-{epochnum}:{version}-{release}.{arch}.rpm'
                    .format(epochnum=rpm['epoch'] or 0, **rpm)
                    for rpm in rpm_list]

            objects = Modulemd.objects_from_string(
                build['extra']['typeinfo']['module']['modulemd_str'])
            assert len(objects) == 1
            mmd = objects[0]
            assert isinstance(mmd, Modulemd.Module)
            # Make sure we have a version 2 modulemd file
            mmd.upgrade()

            resolved_modules[module_spec.name] = ModuleInfo(module_spec.name,
                                                            module_spec.stream,
                                                            module_spec.version,
                                                            mmd, rpms)
        return resolved_modules
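
The Modulemd calls used throughout these examples come from libmodulemd's version 1 GObject Introspection API. A minimal sketch of the parse-and-upgrade step on its own, assuming libmodulemd 1.x is installed and importable through gi (the function name is illustrative):

import gi
gi.require_version('Modulemd', '1.0')
from gi.repository import Modulemd


def parse_single_modulemd(yaml_str):
    """Parse one modulemd document and normalize it to metadata version 2."""
    objects = Modulemd.objects_from_string(yaml_str)
    if len(objects) != 1:
        raise ValueError("expected exactly one modulemd document")
    mmd = objects[0]
    if not isinstance(mmd, Modulemd.Module):
        raise TypeError("parsed object is not a Modulemd.Module")
    # upgrade() rewrites the metadata in place to the version 2 format
    mmd.upgrade()
    return mmd
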
    def load(self):
        if self.repo is None:
            raise LoadCacheException(self.repo)

        yaml_file_path = None
        if not self._metadata_fn:
            repodata_dir = self.repo._cachedir + "/repodata/"
            files = os.listdir(repodata_dir)
            for file in files:
                if "modules.yaml" in file:
                    yaml_file_path = repodata_dir + file
                    break

        if not self._metadata_fn and not yaml_file_path:
            raise MissingYamlException(self.repo._cachedir)

        openfunc = open
        if (self._metadata_fn and self._metadata_fn.endswith('.gz')) \
                or (yaml_file_path and yaml_file_path.endswith('.gz')):
            openfunc = gzip.open
        with openfunc(self._metadata_fn or yaml_file_path,
                      "r") as modules_yaml_fd:
            modules_yaml = modules_yaml_fd.read()

        if PY3 and isinstance(modules_yaml, bytes):
            modules_yaml = modules_yaml.decode("utf-8")

        return Modulemd.objects_from_string(modules_yaml)
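
The load() method above transparently handles both plain and gzip-compressed module metadata. A standalone sketch of the same lookup, with the repo-specific bits replaced by a plain repodata directory path (the function name is illustrative):

import gzip
import os


def read_modules_yaml(repodata_dir):
    """Return the modules.yaml text from a repodata directory, or None."""
    for name in os.listdir(repodata_dir):
        if "modules.yaml" in name:
            path = os.path.join(repodata_dir, name)
            # Pick the right open function for plain vs. gzip-compressed files
            openfunc = gzip.open if path.endswith(".gz") else open
            with openfunc(path, "r") as fd:
                data = fd.read()
            # gzip.open() in "r" mode returns bytes on Python 3
            if isinstance(data, bytes):
                data = data.decode("utf-8")
            return data
    return None
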
    def _resolve_modules(self, compose_source):
        koji_session = get_koji_session(self.workflow, fallback=NO_FALLBACK)

        resolved_modules = {}
        for module in compose_source.strip().split():
            module_spec = split_module_spec(module)
            build, rpm_list = get_koji_module_build(koji_session, module_spec)

            # The returned RPM list contains source RPMs and RPMs for all
            # architectures.
            rpms = [
                '{name}-{epochnum}:{version}-{release}.{arch}.rpm'.format(
                    epochnum=rpm['epoch'] or 0, **rpm) for rpm in rpm_list
            ]

            objects = Modulemd.objects_from_string(
                build['extra']['typeinfo']['module']['modulemd_str'])
            assert len(objects) == 1
            mmd = objects[0]
            assert isinstance(mmd, Modulemd.Module)
            # Make sure we have a version 2 modulemd file
            mmd.upgrade()

            resolved_modules[module_spec.name] = ModuleInfo(
                module_spec.name, module_spec.stream, module_spec.version, mmd,
                rpms)
        return resolved_modules
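
split_module_spec() and ModuleSpec.from_str() are helpers from the surrounding projects and are not shown here. A rough sketch of what such a splitter might look like, assuming the specs take the NAME:STREAM[:VERSION] form seen in these examples:

from collections import namedtuple

ModuleSpecParts = namedtuple('ModuleSpecParts', 'name stream version')


def split_module_spec_sketch(spec):
    """Split a NAME:STREAM[:VERSION] module spec (illustrative only)."""
    parts = spec.split(':')
    if len(parts) == 2:
        return ModuleSpecParts(parts[0], parts[1], None)
    if len(parts) == 3:
        return ModuleSpecParts(parts[0], parts[1], parts[2])
    raise RuntimeError("Cannot parse module specification: %s" % spec)
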
Example #4
    def _resolve_modules(self, compose_source):
        resolved_modules = {}
        # The effect of develop=True is that requests to the PDC are made without authentication;
        # since our interaction with the PDC is read-only, this is fine for our needs and
        # makes things simpler.
        pdc_client = get_pdc_session(self.workflow, self.pdc_fallback)

        for module_spec in compose_source.strip().split():
            # split_module_spec() raises RuntimeError on malformed specs; a
            # missing version is treated the same way so both cases produce
            # one uniform error message.
            try:
                module = split_module_spec(module_spec)
                if not module.version:
                    raise RuntimeError
            except RuntimeError:
                raise RuntimeError("Cannot parse resolved module in compose: %s" % module_spec)

            query = {
                'variant_id': module.name,
                'variant_version': module.stream,
                'variant_release': module.version,
                'active': True,
            }

            self.log.info("Looking up module metadata for '%s' in the PDC", module_spec)
            retval = pdc_client['unreleasedvariants/'](page_size=-1,
                                                       fields=['modulemd', 'rpms'], **query)
            # Error handling
            if not retval:
                raise RuntimeError("Failed to find module in PDC %r" % query)
            if len(retval) != 1:
                raise RuntimeError("Multiple modules in the PDC matched %r" % query)

            objects = Modulemd.objects_from_string(retval[0]['modulemd'])
            assert len(objects) == 1
            mmd = objects[0]
            assert isinstance(mmd, Modulemd.Module)
            # Make sure we have a version 2 modulemd file
            mmd.upgrade()
            rpms = set(retval[0]['rpms'])

            resolved_modules[module.name] = ModuleInfo(module.name, module.stream, module.version,
                                                       mmd, rpms)
        return resolved_modules
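
The comment about develop=True describes how get_pdc_session presumably builds its client. A rough sketch of the same unauthenticated lookup using the pdc_client package directly; the server URL and module coordinates are illustrative values, not taken from the example:

from pdc_client import PDCClient

# develop=True skips authentication, which is enough for read-only queries
pdc = PDCClient('https://pdc.fedoraproject.org/rest_api/v1', develop=True)
results = pdc['unreleasedvariants/'](
    page_size=-1,
    fields=['modulemd', 'rpms'],
    variant_id='httpd',
    variant_version='2.4',
    variant_release='20180816144203',
    active=True,
)
if len(results) != 1:
    raise RuntimeError("Expected exactly one PDC match, got %d" % len(results))
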
def main():
    k = koji.ClientSession('https://koji.fedoraproject.org/kojihub')
    script_dir = os.path.dirname(os.path.realpath(__file__))

    # Get the repo we're running in
    repo = git.Repo(script_dir,
                    search_parent_directories=True)
    branch_version = repo.active_branch

    print(repr(branch_version.name))
    if branch_version.name == "master":
        branch_version = get_rawhide_version(k)
        print("Setting branch_version: %s" % branch_version)

    tags = ['%s-modular' % branch_version,
            '%s-modular-override' % branch_version,
            '%s-modular-pending' % branch_version,
            '%s-modular-signing-pending' % branch_version,
            '%s-modular-updates' % branch_version,
            '%s-modular-updates-candidate' % branch_version,
            '%s-modular-updates-pending' % branch_version,
            '%s-modular-updates-testing' % branch_version,
            '%s-modular-updates-testing-pending' % branch_version]

    tagged_builds = []
    for tag in tags:
        tagged_builds.extend(get_latest_modules_in_tag(k, tag))

    # Make the list unique since some modules may have multiple tags
    unique_builds = {}
    for build in tagged_builds:
        unique_builds[build['id']] = build

    translatable_strings = set()
    module_streams = set()
    for build_id in unique_builds.keys():
        build = k.getBuild(build_id)
        print("Processing %s:%s" % (build['package_name'], build['nvr']))

        module_streams.add("%s:%s" % (
            build['extra']['typeinfo']['module']['name'],
            build['extra']['typeinfo']['module']['stream']))

        modulemds = Modulemd.objects_from_string(
            build['extra']['typeinfo']['module']['modulemd_str'])

        # We should only get a single modulemd document from Koji
        assert len(modulemds) == 1

        translatable_strings.add(modulemds[0].props.summary)
        translatable_strings.add(modulemds[0].props.description)

        # Get any profile descriptions
        for profile_name, profile in modulemds[0].peek_profiles().items():
            if profile.props.description:
                translatable_strings.add(profile.props.description)

    with open("module_streams.txt", 'w') as f:
        for module_stream in sorted(module_streams):
            f.write("%s\n" % module_stream)

    with open("fedora-modularity-translations.pot", 'w') as f:
        for tstring in sorted(translatable_strings):
            msgid_string = "\"%s\"" % tstring
            if "\n" in tstring:
                msgid_string = "\"\"\n\"%s\"" % tstring.replace('\n', '\\n"\n"')
            # msgid_string already carries its own quoting, so it must not be
            # wrapped in a second pair of quotes here.
            f.write("msgid %s\n"
                    "msgstr \"\"\n\n" % msgid_string)
Example #6
def get_module_catalog_from_tags(session, tags, debug=False):
    """
    Construct a Babel translation source catalog from the contents of the
    provided tags.
    :param session: A Koji session
    :param tags: A set of Koji tags from which module metadata should be pulled
    :param debug: Whether to print debugging information to the console
    :return: A babel.messages.Catalog containing extracted translatable strings
    from any modules in the provided tags. Raises an exception if any of the
    retrieved modulemd is invalid.
    """

    catalog = Catalog(project="fedora-modularity-translations")

    tagged_builds = []
    for tag in tags:
        tagged_builds.extend(get_latest_modules_in_tag(session, tag, debug))

    # Make the list unique since some modules may have multiple tags
    unique_builds = {}
    for build in tagged_builds:
        unique_builds[build['id']] = build

    for build_id in unique_builds.keys():
        # Koji sometimes disconnects for no apparent reason. Retry up to 5
        # times before failing.
        for attempt in range(5):
            try:
                build = session.getBuild(build_id)
            except requests.exceptions.ConnectionError:
                if debug:
                    print("Connection lost while processing buildId %s, "
                          "retrying..." % build_id,
                          file=sys.stderr)
            else:
                # Succeeded this time, so break out of the loop
                break
        else:
            # Every attempt failed; stop instead of continuing with an
            # undefined (or stale) build.
            raise RuntimeError("Could not retrieve build %s from Koji" %
                               build_id)
        if debug:
            print("Processing %s:%s" % (build['package_name'], build['nvr']))

        modulemds = Modulemd.objects_from_string(
            build['extra']['typeinfo']['module']['modulemd_str'])

        # We should only get a single modulemd document from Koji
        if len(modulemds) != 1:
            raise ValueError("Koji build %s returned multiple modulemd YAML "
                             "documents." % build['nvr'])

        mmd = modulemds[0]

        # Process the summary
        msg = catalog.get(mmd.props.summary)
        if msg:
            locations = msg.locations
        else:
            locations = []
        locations.append(
            ("%s;%s;summary" % (mmd.props.name, mmd.props.stream), 1))
        catalog.add(mmd.props.summary, locations=locations)

        # Process the description
        msg = catalog.get(mmd.props.description)
        if msg:
            locations = msg.locations
        else:
            locations = []
        locations.append(
            ("%s;%s;description" % (mmd.props.name, mmd.props.stream), 2))
        catalog.add(mmd.props.description, locations=locations)

        # Get any profile descriptions
        for profile_name, profile in mmd.peek_profiles().items():
            if profile.props.description:
                msg = catalog.get(profile.props.description)
                if msg:
                    locations = msg.locations
                else:
                    locations = []

                locations.append(
                    ("%s;%s;profile;%s" %
                     (mmd.props.name, mmd.props.stream, profile.props.name),
                     3))
                catalog.add(profile.props.description, locations=locations)

    return catalog
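
get_module_catalog_from_tags() returns a babel.messages.Catalog, which Babel can serialize to a .pot file directly. A brief usage sketch; the tag name is an illustrative value:

import koji
from babel.messages.pofile import write_po

session = koji.ClientSession('https://koji.fedoraproject.org/kojihub')
catalog = get_module_catalog_from_tags(session, {'f30-modular'}, debug=True)

# write_po() expects a binary file object
with open('fedora-modularity-translations.pot', 'wb') as f:
    write_po(f, catalog)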