def modify_repo(repo_path, modules):
    """Inject module metadata into a createrepo_c-style repository.

    Serializes *modules* into a temporary ``modules.yaml`` file and runs
    ``modifyrepo_c`` to attach it (mdtype ``modules``) to the repository's
    ``repodata`` directory.

    :param repo_path: path to a repository containing a ``repodata`` dir
    :param modules: list of Modulemd module objects to serialize
    :raises subprocess.CalledProcessError: if ``modifyrepo_c`` fails
    """
    tmp = tempfile.mkdtemp()
    path = os.path.join(tmp, "modules.yaml")
    try:
        # Dump the whole list exactly once.  The original code looped over
        # ``modules`` and re-dumped the complete list on every iteration,
        # doing the same work len(modules) times.
        Modulemd.dump(modules, path)
        subprocess.check_call(["modifyrepo_c", "--mdtype=modules", path,
                               os.path.join(repo_path, "repodata")])
    finally:
        # Clean up the scratch file/dir even when dump/modifyrepo_c fails.
        if os.path.exists(path):
            os.unlink(path)
        os.rmdir(tmp)
    def _resolve_modules(self, compose_source):
        """Resolve each module listed in *compose_source* against Koji.

        *compose_source* is a whitespace-separated list of module specs.
        For each spec the Koji build and its RPM list are fetched, the
        embedded modulemd document is parsed and upgraded to version 2,
        and the result is keyed by module name.

        :return: dict mapping module name -> ModuleInfo
        """
        session = get_koji_session(self.workflow, fallback=NO_FALLBACK)

        result = {}
        for item in compose_source.strip().split():
            spec = ModuleSpec.from_str(item)
            koji_build, rpm_list = get_koji_module_build(session, spec)

            # The returned RPM list contains source RPMs and RPMs for all
            # architectures; render each one as an N-E:V-R.A.rpm string.
            rpm_names = []
            for rpm in rpm_list:
                rpm_names.append(
                    '{name}-{epochnum}:{version}-{release}.{arch}.rpm'
                    .format(epochnum=rpm['epoch'] or 0, **rpm))

            mmd_objects = Modulemd.objects_from_string(
                koji_build['extra']['typeinfo']['module']['modulemd_str'])
            assert len(mmd_objects) == 1
            mmd = mmd_objects[0]
            assert isinstance(mmd, Modulemd.Module)
            # Make sure we have a version 2 modulemd file
            mmd.upgrade()

            result[spec.name] = ModuleInfo(spec.name, spec.stream,
                                           spec.version, mmd, rpm_names)
        return result
def index_modulemd_files(repo_path):
    """Parse every ``*.yaml`` modulemd file directly inside *repo_path*.

    Files are visited in sorted name order; for each YAML file the first
    document of the first group returned by
    ``Modulemd.objects_from_file_ext`` is collected.

    :return: list of parsed modulemd objects
    """
    collected = []
    for entry in sorted(os.listdir(repo_path)):
        if entry.endswith(".yaml"):
            parsed = Modulemd.objects_from_file_ext(
                os.path.join(repo_path, entry))
            collected.append(parsed[0][0])
    return collected
Ejemplo n.º 4
0
 def add_api(self, packages):
     """Record *packages* as the module's public RPM API.

     Each package is stringified and collected into a Modulemd.SimpleSet
     before being stored on the wrapped modulemd object.
     """
     api_set = Modulemd.SimpleSet()
     for pkg in packages:
         api_set.add(str(pkg))
     self.mmd.set_rpm_api(api_set)
Ejemplo n.º 5
0
 def __init__(self, name="", stream="", version=0, summary="", config=None):
     """Create a minimal v1 ModuleStream wrapper.

     Both content and module licenses are filled with a single "unknown"
     placeholder entry; the description is left empty.
     """
     self.config = config
     license_set = Modulemd.SimpleSet()
     license_set.add("unknown")
     self.mmd = Modulemd.ModuleStream(mdversion=1,
                                      name=name,
                                      stream=stream,
                                      version=version,
                                      summary=summary,
                                      description="",
                                      content_licenses=license_set,
                                      module_licenses=license_set)
Ejemplo n.º 6
0
 def defaults(self):
     """Lazily build and cache the Modulemd.Defaults for this module."""
     if self._defaults is not None:
         return self._defaults
     defaults = Modulemd.Defaults()
     defaults.set_module_name(self.name)
     # default stream and profiles remain unset
     self._defaults = defaults
     return self._defaults
Ejemplo n.º 7
0
                i for i in os.listdir(noarch_module_dir) if i.endswith(".rpm")
            ]
        else:
            noarch_rpms = []

        rpms = sorted(set(rpms) | set(noarch_rpms))

        # HACK: force epoch to make test data compatible with libmodulemd >= 1.4.0
        rpms_with_epoch = []
        for i in rpms:
            n, v, ra = i.rsplit("-", 2)
            nevra = "%s-0:%s-%s" % (n, v, ra)
            rpms_with_epoch.append(nevra)
        rpms = rpms_with_epoch

        mmd = Modulemd.Module()
        mmd.set_mdversion(int(1))
        mmd.set_name(name)
        mmd.set_stream(stream)
        mmd.set_version(int(version))
        sset = Modulemd.SimpleSet()
        sset.add("LGPLv2")
        mmd.set_module_licenses(sset)
        mmd.set_summary("Fake module")
        mmd.set_description(mmd.peek_summary())
        artifacts = Modulemd.SimpleSet()
        for rpm in rpms:
            artifacts.add(rpm[:-4])
        mmd.set_rpm_artifacts(artifacts)
        for profile_name in profiles:
            profile = Modulemd.Profile()
Ejemplo n.º 8
0
    def test_merger(self):
        """Exercise Modulemd.ModuleIndexMerger priority resolution.

        Checks three scenarios: the baseline defaults of a single index,
        merging two indexes at *equal* priority (conflicting default
        streams resolve to no default stream), and merging at *different*
        priorities (the higher-priority index wins outright).
        """
        # Get a set of objects in a ModuleIndex
        base_index = Modulemd.ModuleIndex()
        base_index.update_from_file(
            path.join(self.test_data_path, "merging-base.yaml"), True)

        # Baseline
        httpd_defaults = base_index.get_module('httpd').get_defaults()
        self.assertIsNotNone(httpd_defaults)
        self.assertEqual(httpd_defaults.get_default_stream(), '2.2')
        httpd_profile_streams = httpd_defaults.get_streams_with_default_profiles(
        )
        self.assertEqual(len(httpd_profile_streams), 2)
        self.assertTrue('2.2' in httpd_profile_streams)
        self.assertTrue('2.8' in httpd_profile_streams)
        self.assertEqual(
            len(httpd_defaults.get_default_profiles_for_stream('2.2')), 2)
        self.assertTrue(
            'client' in httpd_defaults.get_default_profiles_for_stream('2.2'))
        self.assertTrue(
            'server' in httpd_defaults.get_default_profiles_for_stream('2.2'))
        self.assertTrue(
            'notreal' in httpd_defaults.get_default_profiles_for_stream('2.8'))

        # Per-intent ("workstation") defaults differ from the system ones
        self.assertEqual(httpd_defaults.get_default_stream('workstation'),
                         '2.4')
        httpd_profile_streams = httpd_defaults.get_streams_with_default_profiles(
            'workstation')
        self.assertEqual(len(httpd_profile_streams), 2)
        self.assertTrue('2.4' in httpd_profile_streams)
        self.assertTrue('2.6' in httpd_profile_streams)

        self.assertEqual(
            len(
                httpd_defaults.get_default_profiles_for_stream(
                    '2.4', 'workstation')), 1)
        self.assertEqual(
            len(
                httpd_defaults.get_default_profiles_for_stream(
                    '2.6', 'workstation')), 3)

        # Get another set of objects that will override the default stream for
        # nodejs
        override_nodejs_index = Modulemd.ModuleIndex()
        override_nodejs_index.update_from_file(
            path.join(self.test_data_path, "overriding-nodejs.yaml"), True)

        # Test that adding both of these at the same priority level results in
        # the no default stream
        merger = Modulemd.ModuleIndexMerger()
        merger.associate_index(base_index, 0)
        merger.associate_index(override_nodejs_index, 0)

        merged_index = merger.resolve()
        self.assertIsNotNone(merged_index)

        nodejs = merged_index.get_module('nodejs')
        self.assertIsNotNone(nodejs)

        nodejs_defaults = nodejs.get_defaults()
        self.assertIsNotNone(nodejs_defaults)
        self.assertIsNone(nodejs_defaults.get_default_stream())

        # Get another set of objects that will override the above
        override_index = Modulemd.ModuleIndex()
        override_index.update_from_file(
            path.join(self.test_data_path, "overriding.yaml"), True)

        # Test that override_index at a higher priority level succeeds
        # Test that adding both of these at the same priority level fails
        # with a merge conflict.
        # Use randomly-selected high and low values to make sure we don't have
        # sorting issues.
        merger = Modulemd.ModuleIndexMerger()
        random_low = random.randint(1, 100)
        random_high = random.randint(101, 999)
        print("Low priority: %d, High priority: %d" %
              (random_low, random_high))
        merger.associate_index(base_index, random_low)
        merger.associate_index(override_index, random_high)

        merged_index = merger.resolve()
        self.assertIsNotNone(merged_index)

        # Validate merged results

        # HTTPD
        httpd_defaults = merged_index.get_module('httpd').get_defaults()
        self.assertIsNotNone(httpd_defaults)
        self.assertEqual(httpd_defaults.get_default_stream(), '2.4')
        httpd_profile_streams = httpd_defaults.get_streams_with_default_profiles(
        )
        self.assertEqual(len(httpd_profile_streams), 2)
        self.assertTrue('2.2' in httpd_profile_streams)
        self.assertTrue('2.4' in httpd_profile_streams)
        self.assertEqual(
            len(httpd_defaults.get_default_profiles_for_stream('2.2')), 2)
        self.assertTrue(
            'client' in httpd_defaults.get_default_profiles_for_stream('2.2'))
        self.assertTrue(
            'server' in httpd_defaults.get_default_profiles_for_stream('2.2'))
        self.assertTrue(
            'client' in httpd_defaults.get_default_profiles_for_stream('2.4'))
        self.assertTrue(
            'server' in httpd_defaults.get_default_profiles_for_stream('2.4'))

        self.assertEqual(httpd_defaults.get_default_stream('workstation'),
                         '2.8')
        httpd_profile_streams = httpd_defaults.get_streams_with_default_profiles(
            'workstation')
        self.assertEqual(len(httpd_profile_streams), 3)
        self.assertTrue('2.4' in httpd_profile_streams)
        self.assertTrue('2.6' in httpd_profile_streams)
        self.assertTrue('2.8' in httpd_profile_streams)
        self.assertEqual(
            len(
                httpd_defaults.get_default_profiles_for_stream(
                    '2.4', 'workstation')), 1)
        self.assertEqual(
            len(
                httpd_defaults.get_default_profiles_for_stream(
                    '2.6', 'workstation')), 3)
        self.assertEqual(
            len(
                httpd_defaults.get_default_profiles_for_stream(
                    '2.8', 'workstation')), 4)
Ejemplo n.º 9
0
def test_flatpak_create_oci(tmpdir, docker_tasker, config_name, breakage,
                            mock_flatpak):
    """End-to-end test of FlatpakCreateOciPlugin.

    Builds a fake container filesystem from CONFIGS[config_name], feeds it
    to the plugin via a mocked docker export, and verifies the produced
    OCI image (contents, components, metadata).  With ``mock_flatpak``
    the flatpak CLI calls are replaced; otherwise a real flatpak >= 0.9.7
    is required (the test silently returns if it is unavailable).
    """
    if not mock_flatpak:
        # Check that we actually have flatpak available
        have_flatpak = False
        try:
            output = subprocess.check_output(['flatpak', '--version'],
                                             universal_newlines=True)
            # NOTE: raw string — the original '(\d+)...' literal relied on
            # invalid escape sequences (SyntaxWarning on modern Python).
            m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
            if m and (int(m.group(1)), int(m.group(2)), int(
                    m.group(3))) >= (0, 9, 7):
                have_flatpak = True

        except (subprocess.CalledProcessError, OSError):
            pass

        if not have_flatpak:
            return

    config = CONFIGS[config_name]

    if mock_flatpak:
        (flexmock(subprocess).should_receive("check_call").replace_with(
            mocked_check_call))

        (flexmock(subprocess).should_receive("check_output").replace_with(
            mocked_check_output))

    workflow = DockerBuildWorkflow({
        "provider": "git",
        "uri": "asd"
    }, TEST_IMAGE)
    setattr(workflow, 'builder', X)
    setattr(workflow.builder, 'tasker', docker_tasker)

    filesystem_dir = os.path.join(str(tmpdir), 'filesystem')
    os.mkdir(filesystem_dir)

    filesystem_contents = config['filesystem_contents']

    # Each key is "<path>" or "<path>:<octal mode>"; None contents means
    # "create a directory".
    for path, contents in filesystem_contents.items():
        parts = path.split(':', 1)
        path = parts[0]
        mode = parts[1] if len(parts) == 2 else None

        fullpath = os.path.join(filesystem_dir, path[1:])
        parent_dir = os.path.dirname(fullpath)
        if not os.path.isdir(parent_dir):
            os.makedirs(parent_dir)

        if contents is None:
            os.mkdir(fullpath)
        else:
            with open(fullpath, 'w') as f:
                f.write(contents)

        if mode is not None:
            os.chmod(fullpath, int(mode, 8))

    if breakage == 'no_runtime':
        # Copy the parts of the config we are going to change
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])

        module_config = config['modules']['eog']
        mmd = Modulemd.Module.new_from_string(module_config['metadata'])

        # Clear out all dependencies. Setting via the property causes a crash
        # https://gitlab.gnome.org/GNOME/pygobject/issues/37
        #        mmd.props.dependencies = [Modulemd.Dependencies()]
        mmd.set_dependencies([Modulemd.Dependencies()])

        module_config['metadata'] = mmd.dumps()

        expected_exception = 'Failed to identify runtime module'
    else:
        assert breakage is None
        expected_exception = None

    filesystem_tar = os.path.join(filesystem_dir, 'tar')
    with open(filesystem_tar, "wb") as f:
        with tarfile.TarFile(fileobj=f, mode='w') as tf:
            # 'member' (not 'f') — the original loop variable shadowed the
            # open file handle passed to TarFile as fileobj.
            for member in os.listdir(filesystem_dir):
                tf.add(os.path.join(filesystem_dir, member), member)

    export_stream = open(filesystem_tar, "rb")

    def stream_to_generator(s):
        while True:
            # Yield small chunks to test the StreamAdapter code better
            buf = s.read(100)
            if len(buf) == 0:
                return
            yield buf

    export_generator = stream_to_generator(export_stream)

    (flexmock(docker_tasker.d).should_receive('export').with_args(
        CONTAINER_ID).and_return(export_generator))

    (flexmock(
        docker_tasker.d.wrapped).should_receive('create_container').with_args(
            workflow.image,
            command=["/bin/bash"]).and_return({'Id': CONTAINER_ID}))
    (flexmock(docker_tasker.d.wrapped).should_receive(
        'remove_container').with_args(CONTAINER_ID))

    setup_flatpak_source_info(workflow, config)

    runner = PrePublishPluginsRunner(docker_tasker, workflow,
                                     [{
                                         'name': FlatpakCreateOciPlugin.key,
                                         'args': {}
                                     }])

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        # str(ex.value), not str(ex): ExceptionInfo's str() is not the
        # exception message (consistent with the newer test variant).
        assert expected_exception in str(ex.value)
    else:
        runner.run()

        dir_metadata = workflow.exported_image_sequence[-2]
        assert dir_metadata['type'] == IMAGE_TYPE_OCI

        tar_metadata = workflow.exported_image_sequence[-1]
        assert tar_metadata['type'] == IMAGE_TYPE_OCI_TAR

        # Check that the expected files ended up in the flatpak

        if mock_flatpak:
            inspector = MockInspector(tmpdir, dir_metadata)
        else:
            inspector = DefaultInspector(tmpdir, dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        components = {c['name'] for c in workflow.image_components}
        for n in config['expected_components']:
            assert n in components
        for n in config['unexpected_components']:
            assert n not in components

        metadata_lines = inspector.cat_file('/metadata').split('\n')
        assert any(
            re.match(r'runtime=org.fedoraproject.Platform/.*/f28$', l)
            for l in metadata_lines)
        assert any(
            re.match(r'sdk=org.fedoraproject.Sdk/.*/f28$', l)
            for l in metadata_lines)

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file(
                '/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines

            assert 'name=org.gnome.eog' in metadata_lines
            assert 'tags=Viewer' in metadata_lines
            assert 'command=eog2' in metadata_lines
        elif config_name == 'runtime':  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'

            assert 'name=org.fedoraproject.Platform' in metadata_lines
        else:  # SDK
            assert 'name=org.fedoraproject.Sdk' in metadata_lines
Ejemplo n.º 10
0
    def test_constructors(self):
        """Verify ModuleStream.new() across all supported mdversions.

        Checks every combination of module name / stream name (both, one,
        neither), that the abstract base cannot be instantiated directly,
        and that invalid mdversions (0 or unknown) fail.
        """
        for version in modulestream_versions:

            # Test that the new() function works
            stream = Modulemd.ModuleStream.new(version, "foo", "latest")
            assert stream
            assert isinstance(stream, Modulemd.ModuleStream)

            assert stream.props.mdversion == version
            assert stream.get_mdversion() == version
            assert stream.props.module_name == "foo"
            assert stream.get_module_name() == "foo"
            assert stream.props.stream_name == "latest"
            assert stream.get_stream_name() == "latest"

            # Test that the new() function works without a stream name
            stream = Modulemd.ModuleStream.new(version, "foo")
            assert stream
            assert isinstance(stream, Modulemd.ModuleStream)

            assert stream.props.mdversion == version
            assert stream.get_mdversion() == version
            assert stream.props.module_name == "foo"
            assert stream.get_module_name() == "foo"
            assert stream.props.stream_name is None
            assert stream.get_stream_name() is None

            # Test that the new() function works with no module name
            stream = Modulemd.ModuleStream.new(version, None, "latest")
            assert stream
            assert isinstance(stream, Modulemd.ModuleStream)

            assert stream.props.mdversion == version
            assert stream.get_mdversion() == version
            assert stream.props.module_name is None
            assert stream.get_module_name() is None
            assert stream.props.stream_name == "latest"
            assert stream.get_stream_name() == "latest"

            # Test that the new() function works with no module or stream
            stream = Modulemd.ModuleStream.new(version)
            assert stream
            assert isinstance(stream, Modulemd.ModuleStream)

            assert stream.props.mdversion == version
            assert stream.get_mdversion() == version
            assert stream.props.module_name is None
            assert stream.get_module_name() is None
            assert stream.props.stream_name is None
            assert stream.get_stream_name() is None

        # Test that we cannot instantiate directly
        # (assertRaisesRegex: the old assertRaisesRegexp alias has been
        # deprecated since Python 3.2 and was removed in Python 3.12)
        with self.assertRaisesRegex(
            TypeError, "cannot create instance of abstract"
        ):
            Modulemd.ModuleStream()

        # Test with a zero mdversion
        with self.assertRaisesRegex(TypeError, "constructor returned NULL"):
            with self.expect_signal():
                defs = Modulemd.ModuleStream.new(0)

        # Test with an unknown mdversion
        with self.assertRaisesRegex(TypeError, "constructor returned NULL"):
            with self.expect_signal():
                defs = Modulemd.ModuleStream.new(
                    Modulemd.ModuleStreamVersionEnum.LATEST + 1
                )
def test_flatpak_create_oci(tmpdir, docker_tasker, config_name, flatpak_metadata, breakage):
    """End-to-end test of FlatpakCreateOciPlugin (skopeo-based variant).

    Requires real ``flatpak`` (>= 0.9.7) and ``skopeo`` binaries; skips
    otherwise.  Builds a fake container filesystem from
    CONFIGS[config_name], runs the plugin, and validates the resulting
    OCI image: labels/annotations per *flatpak_metadata*, file contents,
    components and flatpak /metadata.  *breakage* selects a failure mode
    ('no_runtime', 'copy_error') whose error message is asserted.
    """
    # Check that we actually have flatpak available
    have_flatpak = False
    try:
        output = subprocess.check_output(['flatpak', '--version'],
                                         universal_newlines=True)
        m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
        if m and (int(m.group(1)), int(m.group(2)), int(m.group(3))) >= (0, 9, 7):
            have_flatpak = True

    except (subprocess.CalledProcessError, OSError):
        # Positional reason: pytest.skip()'s 'msg' keyword was removed in
        # pytest 7.0.
        pytest.skip('flatpak not available')

    if not have_flatpak:
        return

    # Check if we have skopeo
    try:
        subprocess.check_output(['skopeo', '--version'])
    except (subprocess.CalledProcessError, OSError):
        pytest.skip('skopeo not available')

    config = CONFIGS[config_name]

    workflow = DockerBuildWorkflow(
        TEST_IMAGE,
        source={"provider": "git", "uri": "asd"}
    )
    setattr(workflow, 'builder', MockBuilder())
    workflow.builder.df_path = write_docker_file(config, str(tmpdir))
    setattr(workflow.builder, 'tasker', docker_tasker)

    #  Make a local copy instead of pushing oci to docker storage
    workflow.storage_transport = 'oci:{}'.format(str(tmpdir))

    make_and_store_reactor_config_map(workflow, flatpak_metadata)

    filesystem_dir = os.path.join(str(tmpdir), 'filesystem')
    os.mkdir(filesystem_dir)

    filesystem_contents = config['filesystem_contents']

    # Each key is "<path>" or "<path>:<octal mode>"; None contents means
    # "create a directory".
    for path, contents in filesystem_contents.items():
        parts = path.split(':', 1)
        path = parts[0]
        mode = parts[1] if len(parts) == 2 else None

        fullpath = os.path.join(filesystem_dir, path[1:])
        parent_dir = os.path.dirname(fullpath)
        if not os.path.isdir(parent_dir):
            os.makedirs(parent_dir)

        if contents is None:
            os.mkdir(fullpath)
        else:
            with open(fullpath, 'wb') as f:
                f.write(contents)

        if mode is not None:
            os.chmod(fullpath, int(mode, 8))

    if breakage == 'no_runtime':
        # Copy the parts of the config we are going to change
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])

        module_config = config['modules']['eog']

        mmd = Modulemd.ModuleStream.read_string(module_config['metadata'], strict=True)
        mmd.clear_dependencies()
        mmd.add_dependencies(Modulemd.Dependencies())
        mmd_index = Modulemd.ModuleIndex.new()
        mmd_index.add_module_stream(mmd)
        module_config['metadata'] = mmd_index.dump_to_string()

        expected_exception = 'Failed to identify runtime module'
    elif breakage == 'copy_error':
        workflow.storage_transport = 'idontexist'
        expected_exception = 'CalledProcessError'
    else:
        assert breakage is None
        expected_exception = None

    filesystem_tar = os.path.join(filesystem_dir, 'tar')
    with open(filesystem_tar, "wb") as f:
        with tarfile.TarFile(fileobj=f, mode='w') as tf:
            # 'member' (not 'f') — the original loop variable shadowed the
            # open file handle passed to TarFile as fileobj.
            for member in os.listdir(filesystem_dir):
                tf.add(os.path.join(filesystem_dir, member), member)

    export_stream = open(filesystem_tar, "rb")

    def stream_to_generator(s):
        while True:
            # Yield small chunks to test the StreamAdapter code better
            buf = s.read(100)
            if len(buf) == 0:
                return
            yield buf

    export_generator = stream_to_generator(export_stream)

    (flexmock(docker_tasker.tasker)
     .should_receive('export_container')
     .with_args(CONTAINER_ID)
     .and_return(export_generator))

    (flexmock(docker_tasker.tasker.d.wrapped)
     .should_receive('create_container')
     .with_args(workflow.image, command=["/bin/bash"])
     .and_return({'Id': CONTAINER_ID}))

    (flexmock(docker_tasker.tasker.d.wrapped)
     .should_receive('remove_container')
     .with_args(CONTAINER_ID, force=False))

    setup_flatpak_source_info(workflow, config)

    runner = PrePublishPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FlatpakCreateOciPlugin.key,
            'args': {}
        }]
    )

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex.value)
    else:
        # Check if run replaces image_id and marks filesystem image for removal
        filesystem_image_id = 'xxx'
        for_removal = workflow.plugin_workspace.get(
            'remove_built_image', {}).get('images_to_remove', [])
        assert workflow.builder.image_id == filesystem_image_id
        assert filesystem_image_id not in for_removal
        runner.run()
        for_removal = workflow.plugin_workspace['remove_built_image']['images_to_remove']
        assert re.match(r'^sha256:\w{64}$', workflow.builder.image_id)
        assert filesystem_image_id in for_removal

        dir_metadata = workflow.exported_image_sequence[-2]
        assert dir_metadata['type'] == IMAGE_TYPE_OCI

        tar_metadata = workflow.exported_image_sequence[-1]
        assert tar_metadata['type'] == IMAGE_TYPE_OCI_TAR

        # Check that the correct labels and annotations were written

        labels, annotations = load_labels_and_annotations(dir_metadata)

        if config_name == 'app':
            assert labels['name'] == 'eog'
            assert labels['com.redhat.component'] == 'eog'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170629213428'
        elif config_name == 'runtime':  # runtime
            assert labels['name'] == 'flatpak-runtime'
            assert labels['com.redhat.component'] == 'flatpak-runtime-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'
        else:
            assert labels['name'] == 'flatpak-sdk'
            assert labels['com.redhat.component'] == 'flatpak-sdk-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'

        if flatpak_metadata == 'annotations':
            assert annotations.get('org.flatpak.ref') == config['expected_ref_name']
            assert 'org.flatpak.ref' not in labels
        elif flatpak_metadata == 'labels':
            assert 'org.flatpak.ref' not in annotations
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']
        elif flatpak_metadata == 'both':
            assert annotations.get('org.flatpak.ref') == config['expected_ref_name']
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']

        # Check that the expected files ended up in the flatpak

        # Flatpak versions before 1.6 require annotations to be present, since we don't
        # require such a new Flatpak, skip remaining checks in the label-only case
        if flatpak_metadata == 'labels':
            return

        inspector = DefaultInspector(tmpdir, dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        components = {c['name'] for c in workflow.image_components}  # noqa:E501; pylint: disable=not-an-iterable
        for n in config['expected_components']:
            assert n in components
        for n in config['unexpected_components']:
            assert n not in components

        metadata_lines = inspector.cat_file('/metadata').split('\n')
        assert any(re.match(r'runtime=org.fedoraproject.Platform/.*/f28$', l)
                   for l in metadata_lines)
        assert any(re.match(r'sdk=org.fedoraproject.Sdk/.*/f28$', l)
                   for l in metadata_lines)

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file('/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines

            assert 'name=org.gnome.eog' in metadata_lines
            assert 'tags=Viewer' in metadata_lines
            assert 'command=eog2' in metadata_lines
        elif config_name == 'runtime':  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'

            assert 'name=org.fedoraproject.Platform' in metadata_lines
        else:  # SDK
            assert 'name=org.fedoraproject.Sdk' in metadata_lines
Ejemplo n.º 12
0
def _add_catalog_entry(catalog, message, location):
    """Add *message* to *catalog*, appending *location* to any locations
    already recorded for that string (so duplicates merge their origins)."""
    existing = catalog.get(message)
    locations = existing.locations if existing else []
    locations.append(location)
    catalog.add(message, locations=locations)


def get_module_catalog_from_tags(session, tags, debug=False):
    """
    Construct a Babel translation source catalog from the contents of the
    provided tags.
    :param session: A Koji session
    :param tags: A set of Koji tags from which module metadata should be pulled
    :param debug: Whether to print debugging information to the console
    :return: A babel.messages.Catalog containing extracted translatable strings
    from any modules in the provided tags. Raises an exception if any of the
    retrieved modulemd is invalid, or if Koji stays unreachable after retries.
    """

    catalog = Catalog(project="fedora-modularity-translations")

    tagged_builds = []
    for tag in tags:
        tagged_builds.extend(get_latest_modules_in_tag(session, tag, debug))

    # Make the list unique since some modules may have multiple tags
    unique_builds = {}
    for build in tagged_builds:
        unique_builds[build['id']] = build

    for build_id in unique_builds.keys():
        # Koji sometimes disconnects for no apparent reason. Retry up to 5
        # times before failing.
        build = None
        for attempt in range(5):
            try:
                build = session.getBuild(build_id)
            except requests.exceptions.ConnectionError:
                if debug:
                    print("Connection lost while processing buildId %s, "
                          "retrying..." % build_id,
                          file=sys.stderr)
            else:
                # Succeeded this time, so break out of the loop
                break
        if build is None:
            # All five attempts failed; the original code fell through here
            # silently and reused a stale (or unbound) 'build' value.
            raise RuntimeError(
                "Could not retrieve build %s from Koji after 5 attempts"
                % build_id)
        if debug:
            print("Processing %s:%s" % (build['package_name'], build['nvr']))

        modulemds = Modulemd.objects_from_string(
            build['extra']['typeinfo']['module']['modulemd_str'])

        # We should only get a single modulemd document from Koji
        if len(modulemds) != 1:
            raise ValueError("Koji build %s returned multiple modulemd YAML "
                             "documents." % build['nvr'])

        mmd = modulemds[0]

        # Process the summary
        _add_catalog_entry(
            catalog, mmd.props.summary,
            ("%s;%s;summary" % (mmd.props.name, mmd.props.stream), 1))

        # Process the description
        _add_catalog_entry(
            catalog, mmd.props.description,
            ("%s;%s;description" % (mmd.props.name, mmd.props.stream), 2))

        # Get any profile descriptions
        for profile_name, profile in mmd.peek_profiles().items():
            if profile.props.description:
                _add_catalog_entry(
                    catalog, profile.props.description,
                    ("%s;%s;profile;%s" %
                     (mmd.props.name, mmd.props.stream, profile.props.name),
                     3))

    return catalog
    def test_constructors(self):
        """Verify Modulemd.TranslationEntry construction.

        Covers the new() factory, keyword-argument construction with every
        combination of locale/summary/description (including non-ASCII
        text), and the failure modes when the mandatory locale is missing
        or None.
        """
        # Test that the new() function works
        te = Modulemd.TranslationEntry.new('en_US')
        assert te
        assert te.props.locale == 'en_US'
        assert te.get_locale() == 'en_US'
        assert te.props.summary is None
        assert te.get_summary() is None
        assert te.props.description is None
        assert te.get_description() is None
        assert te.get_profiles() == []
        assert te.get_profile_description("test") is None

        # Test that keyword arg locale is accepted
        te = Modulemd.TranslationEntry(locale='en_US')
        assert te
        assert te.props.locale == 'en_US'
        assert te.get_locale() == 'en_US'
        assert te.props.summary is None
        assert te.get_summary() is None
        assert te.props.description is None
        assert te.get_description() is None
        assert te.get_profiles() == []
        assert te.get_profile_description("test") is None

        # Test that init works with locale and summary
        te = Modulemd.TranslationEntry(locale='en_US', summary='foobar')
        assert te
        assert te.props.locale == 'en_US'
        assert te.get_locale() == 'en_US'
        assert te.props.summary == 'foobar'
        assert te.get_summary() == 'foobar'
        assert te.props.description is None
        assert te.get_description() is None
        assert te.get_profiles() == []
        assert te.get_profile_description("test") is None

        # Test that init works with locale and description
        te = Modulemd.TranslationEntry(locale='en_US', description='barfoo')
        assert te
        assert te.props.locale == 'en_US'
        assert te.get_locale() == 'en_US'
        assert te.props.summary is None
        assert te.get_summary() is None
        assert te.props.description == 'barfoo'
        assert te.get_description() == 'barfoo'
        assert te.get_profiles() == []
        assert te.get_profile_description("test") is None

        # Test that init works with locale, summary and description
        te = Modulemd.TranslationEntry(locale='en_US',
                                       summary='foobar',
                                       description='barfoo')
        assert te
        assert te.props.locale == 'en_US'
        assert te.get_locale() == 'en_US'
        assert te.props.summary == 'foobar'
        assert te.get_summary() == 'foobar'
        assert te.props.description == 'barfoo'
        assert te.get_description() == 'barfoo'
        assert te.get_profiles() == []
        assert te.get_profile_description("test") is None

        # Test that init works with locale, unicode summary and unicode
        # description
        te = Modulemd.TranslationEntry(
            locale='ro_TA',  # robots_Tables
            summary='(┛ಠ_ಠ)┛彡┻━┻',
            description='(┛ಠ_ಠ)┛彡')
        assert te
        assert te.props.locale == 'ro_TA'
        assert te.get_locale() == 'ro_TA'
        assert te.props.summary == '(┛ಠ_ಠ)┛彡┻━┻'
        assert te.get_summary() == '(┛ಠ_ಠ)┛彡┻━┻'
        assert te.props.description == '(┛ಠ_ಠ)┛彡'
        assert te.get_description() == '(┛ಠ_ಠ)┛彡'
        assert te.get_profiles() == []
        assert te.get_profile_description("test") is None

        # Test that we fail if we call new() with a None locale
        try:
            te = Modulemd.TranslationEntry.new(None)
            assert False
        except TypeError as e:
            assert 'does not allow None as a value' in e.__str__()

        # Test that we fail if object is instantiated without a locale
        with self.expect_signal():
            Modulemd.TranslationEntry()

        # Test that we fail if object is instantiated with a None locale
        with self.expect_signal():
            Modulemd.TranslationEntry(locale=None)
Ejemplo n.º 14
0
def test_flatpak_create_oci(workflow, config_name, flatpak_metadata, breakage):
    """Run FlatpakCreateOciPlugin end-to-end and verify the produced image.

    :param workflow: test workflow fixture with a source checkout
    :param config_name: key into CONFIGS ('app', 'runtime', or the SDK entry)
    :param flatpak_metadata: where org.flatpak.ref should be written —
        'annotations', 'labels', or 'both'
    :param breakage: None for the happy path, or 'no_runtime' to strip the
        module dependencies and expect the plugin to fail
    """
    # Check that we actually have a new-enough flatpak (>= 0.9.7) available
    have_flatpak = False
    try:
        output = subprocess.check_output(['flatpak', '--version'],
                                         universal_newlines=True)
        m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
        if m and (int(m.group(1)), int(m.group(2)), int(
                m.group(3))) >= (0, 9, 7):
            have_flatpak = True

    except (subprocess.CalledProcessError, OSError):
        # BUGFIX: pytest.skip() lost its 'msg' keyword in pytest 7.0
        # (deprecated since 6.2); the reason must be passed positionally.
        pytest.skip('flatpak not available')

    if not have_flatpak:
        return

    # Check if we have skopeo
    try:
        subprocess.check_output(['skopeo', '--version'])
    except (subprocess.CalledProcessError, OSError):
        pytest.skip('skopeo not available')

    config = CONFIGS[config_name]

    platforms = ['x86_64', 'aarch64', 's390x', 'ppc64le']

    workflow.user_params['flatpak'] = True
    write_docker_file(config, workflow.source.path)
    workflow.build_dir.init_build_dirs(platforms, workflow.source)
    # Replace real layer extraction with one that materializes the canned
    # filesystem contents from the test config.
    mock_extract_filesystem_call = functools.partial(mock_extract_filesystem,
                                                     config)
    (flexmock(ImageUtil).should_receive(
        'extract_filesystem_layer').replace_with(mock_extract_filesystem_call))

    make_and_store_reactor_config_map(workflow, flatpak_metadata)

    if breakage == 'no_runtime':
        # Copy the parts of the config we are going to change so the shared
        # CONFIGS entry is not mutated for other tests.
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])

        module_config = config['modules']['eog']

        # Strip the module's dependencies so no runtime can be identified.
        mmd = Modulemd.ModuleStream.read_string(module_config['metadata'],
                                                strict=True)
        mmd.clear_dependencies()
        mmd.add_dependencies(Modulemd.Dependencies())
        mmd_index = Modulemd.ModuleIndex.new()
        mmd_index.add_module_stream(mmd)
        module_config['metadata'] = mmd_index.dump_to_string()

        expected_exception = 'Failed to identify runtime module'
    else:
        assert breakage is None
        expected_exception = None

    runner = PostBuildPluginsRunner(workflow, [{
        'name': FlatpakCreateOciPlugin.key,
        'args': {}
    }])

    setup_flatpak_composes(workflow)
    source = setup_flatpak_source_info(config)
    (flexmock(FlatpakUtil).should_receive(
        'get_flatpak_source_info').and_return(source))

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex.value)
    else:
        # Compute the component list we expect by running FlatpakBuilder
        # independently over the same rpm-qf output.
        builder = FlatpakBuilder(source,
                                 workflow.build_dir.any_platform.path,
                                 'var/tmp/flatpak-build',
                                 parse_manifest=parse_rpm_output,
                                 flatpak_metadata=FLATPAK_METADATA_ANNOTATIONS)
        with NamedTemporaryFile(dir=workflow.build_dir.any_platform.path) as f:
            f.write(
                config['filesystem_contents']['/var/tmp/flatpak-build.rpm_qf'])
            f.flush()
            expected_components = builder.get_components(f.name)
        results = runner.run()
        x86_64_results = results[FlatpakCreateOciPlugin.key][platforms[0]]
        dir_metadata = x86_64_results['metadata']
        components = x86_64_results['components']
        assert components == expected_components
        assert dir_metadata['type'] == IMAGE_TYPE_OCI

        # Check that the correct labels and annotations were written

        labels, annotations = load_labels_and_annotations(dir_metadata)

        if config_name == 'app':
            assert labels['name'] == 'eog'
            assert labels['com.redhat.component'] == 'eog'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170629213428'
        elif config_name == 'runtime':  # runtime
            assert labels['name'] == 'flatpak-runtime'
            assert labels[
                'com.redhat.component'] == 'flatpak-runtime-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'
        else:
            assert labels['name'] == 'flatpak-sdk'
            assert labels['com.redhat.component'] == 'flatpak-sdk-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'

        if flatpak_metadata == 'annotations':
            assert annotations.get(
                'org.flatpak.ref') == config['expected_ref_name']
            assert 'org.flatpak.ref' not in labels
        elif flatpak_metadata == 'labels':
            assert 'org.flatpak.ref' not in annotations
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']
        elif flatpak_metadata == 'both':
            assert annotations.get(
                'org.flatpak.ref') == config['expected_ref_name']
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']

        # Check that the expected files ended up in the flatpak

        # Flatpak versions before 1.6 require annotations to be present, and Flatpak
        # versions 1.6 and later require labels to be present. Skip the remaining
        # checks unless we have both annotations and labels.
        if flatpak_metadata != 'both':
            return

        inspector = DefaultInspector(str(workflow.build_dir.any_platform.path),
                                     dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        metadata_lines = inspector.cat_file('/metadata').split('\n')
        assert any(
            re.match(r'runtime=org.fedoraproject.Platform/.*/f28$', line)
            for line in metadata_lines)
        assert any(
            re.match(r'sdk=org.fedoraproject.Sdk/.*/f28$', line)
            for line in metadata_lines)

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file(
                '/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines

            assert 'name=org.gnome.eog' in metadata_lines
            assert 'tags=Viewer' in metadata_lines
            assert 'command=eog2' in metadata_lines
        elif config_name == 'runtime':  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'

            assert 'name=org.fedoraproject.Platform' in metadata_lines
        else:  # SDK
            assert 'name=org.fedoraproject.Sdk' in metadata_lines
# --- Ejemplo n.º 15 ---
    def test_find_module_version(self):
        """Exercise RepoModuleDict.find_module_version() stream resolution."""
        repo_modules = RepoModuleDict(None)

        # Register four fixture versions: two on "stream", plus one each on
        # "enabled_stream" and "default_stream".
        for stream_name, ver in (("stream", 1),
                                 ("stream", 2),
                                 ("enabled_stream", 1),
                                 ("default_stream", 1)):
            mmd = self._create_mmd(name="module-name",
                                   stream=stream_name,
                                   version=ver,
                                   profiles={"default": {}})
            repo_modules.add(RepoModuleVersion(mmd, None, None))

        # Mark "stream" as the distro default; no default profiles provided.
        defaults = Modulemd.Defaults()
        defaults.set_module_name("module-name")
        defaults.set_default_stream("stream")
        repo_modules["module-name"].defaults = defaults

        # With nothing enabled, resolution falls back to the default stream
        # and picks its highest version.
        found = repo_modules.find_module_version(name="module-name")
        self.assertEqual(found.full_version, "module-name:stream:2")

        # Enable "enabled_stream" via module configuration.
        conf = dnf.conf.ModuleConf(section="test")
        conf.enabled._set(1)
        conf.stream._set("enabled_stream")
        repo_modules["module-name"].conf = conf

        # An explicit stream from the user wins over the enabled one.
        found = repo_modules.find_module_version(name="module-name",
                                                 stream="stream")
        self.assertEqual(found.full_version, "module-name:stream:2")

        # An explicit stream plus version pins the exact build.
        found = repo_modules.find_module_version(name="module-name",
                                                 stream="stream",
                                                 version=1)
        self.assertEqual(found.full_version, "module-name:stream:1")

        # Without user input, the enabled stream is chosen.
        found = repo_modules.find_module_version(name="module-name")
        self.assertEqual(found.full_version, "module-name:enabled_stream:1")

        # Once disabled, resolution reverts to the default stream.
        conf.enabled._set(0)
        found = repo_modules.find_module_version(name="module-name")
        self.assertEqual(found.full_version, "module-name:stream:2")
            artifacts.add(rpm[:-4])
        # Record the binary artifacts (filenames with the trailing ".rpm"
        # stripped above) on the module metadata object.
        mmd.set_rpm_artifacts(artifacts)
        for profile_name in profiles:
            # Each profile maps a name to the set of RPMs it installs.
            profile = Modulemd.Profile()
            profile.set_name(profile_name)
            profile_rpms = Modulemd.SimpleSet()
            profile_rpms.set(profiles[profile_name]["rpms"])
            profile.set_rpms(profile_rpms)
            mmd.add_profile(profile)

        if name == "httpd":
            # Hard-coded dependencies for the httpd fixture module.
            dependencies = Modulemd.Dependencies()
            if stream == "2.4":
                dependencies.add_requires_single("base-runtime", "f26")
            elif stream == "2.2":
                # An empty stream list means "any stream of base-runtime".
                dependencies.add_requires("base-runtime", [])
            mmd.add_dependencies(dependencies)

        # iterate through all deps and create context hash in a repeatable manner
        # NOTE(review): hashlib's update() requires bytes on Python 3 — these
        # str arguments only work on Python 2; confirm the target interpreter.
        context_hash = hashlib.sha256()
        for dependencies in mmd.peek_dependencies():
            for dep_name, dep_streams in dependencies.peek_requires().items():
                if dep_streams:
                    for dep_stream in dep_streams.get():
                        context_hash.update("%s:%s" % (dep_name, dep_stream))
                else:
                    context_hash.update(dep_name)
        # The context is the first 8 hex digits of the dependency hash.
        mmd.set_context(context_hash.hexdigest()[:8])

        Modulemd.dump([mmd], os.path.join(module_dir, "%s.%s.yaml" % (module_id, arch)))
# --- Ejemplo n.º 17 ---
 def test_version(self):
     """Check the linked libmodulemd matches $MODULEMD_VERSION, when set."""
     wanted = os.getenv('MODULEMD_VERSION')
     # When the environment variable is absent, any library version passes.
     if wanted is not None:
         assert wanted == Modulemd.get_version()
# --- Ejemplo n.º 18 ---
    def test_constructors(self):
        # Both the implicit GObject constructor and the explicit .new()
        # factory must yield a usable (non-None) merger instance.
        merger = Modulemd.ModuleIndexMerger()
        self.assertIsNotNone(merger)

        merger = Modulemd.ModuleIndexMerger.new()
        self.assertIsNotNone(merger)