# --- Example 1 ---
def test_add_flatpak_labels(workflow, source_dir, labels, expected):
    """Verify the plugin appends the expected label line to each platform Dockerfile."""
    data = {} if labels is None else {'flatpak': {'labels': labels}}
    mock_workflow(workflow, source_dir, yaml.dump(data))

    env = MockEnv(workflow).for_plugin(AddFlatpakLabelsPlugin.key)
    env.create_runner().run()

    # When no labels were configured, the Dockerfile's original last line
    # ("CMD sleep 1000") must remain untouched.
    wanted_last_line = expected if expected else "CMD sleep 1000"

    def assert_df_last_line(build_dir):
        df_lines = build_dir.dockerfile_path.read_text().splitlines()
        assert df_lines[-1] == wanted_last_line

    workflow.build_dir.for_each_platform(assert_df_last_line)
def test_invalid_repourl(workflow, build_dir):
    """Plugin should raise RuntimeError with repo details when invalid URL
       is used
    """
    bad_url = "http://example.com/nope/repo"
    workflow = prepare(workflow, build_dir, yum_repourls={'x86_64': [bad_url]})

    runner = (MockEnv(workflow)
              .for_plugin(InjectYumReposPlugin.key, args={'inject_proxy': None})
              .create_runner())

    # Any fetch attempt blows up, simulating a broken repo URL.
    (flexmock(YumRepo)
     .should_receive('fetch')
     .and_raise(Exception, 'Oh noes, repo is not working!'))

    with pytest.raises(PluginFailedException) as exc_info:
        runner.run()

    assert f"Failed to fetch yum repo {bad_url}" in str(exc_info.value)
def mock_env(
    tmpdir,
    docker_tasker,
    platform='x86_64',
    base_layers=0,
    icm_url=CACHITO_ICM_URL,
    r_c_m_override=None,
):
    """Build a runner for AddImageContentManifestPlugin with mocked workflow state."""
    inspection_data = {
        INSPECT_ROOTFS: {INSPECT_ROOTFS_LAYERS: list(range(base_layers))},
    }

    if r_c_m_override is not None:
        r_c_m = r_c_m_override
    else:
        # Minimal reactor config pointing Cachito at the test cert directory.
        r_c_m = {
            'version': 1,
            'cachito': {
                'api_url': CACHITO_URL,
                'auth': {'ssl_certs_dir': str(tmpdir)},
            },
        }

    env = (MockEnv()
           .for_plugin('prebuild', AddImageContentManifestPlugin.key,
                       {'remote_source_icm_url': icm_url})
           .set_reactor_config(r_c_m)
           .make_orchestrator())

    tmpdir.join('cert').write('')
    env.workflow.builder.set_inspection_data(inspection_data)
    env.workflow.user_params['platform'] = platform

    # Ensure to succeed in reading the content_sets.yml
    env.workflow.source.get_build_file_path = \
        lambda: (str(tmpdir), str(tmpdir))

    return env.create_runner(docker_tasker)
def create_runner(workflow,
                  ssl_certs=False,
                  principal=None,
                  keytab=None,
                  poll_interval=0.01,
                  proxy_user=None,
                  use_args=True,
                  koji_target='koji-target'):
    """Create a runner for KojiTagBuildPlugin with a koji map in the workflow.

    When ``use_args`` is False only the target is passed to the plugin;
    ``poll_interval`` is deliberately omitted.
    """
    add_koji_map_in_workflow(workflow,
                             hub_url='',
                             ssl_certs_dir='/' if ssl_certs else None,
                             krb_keytab=keytab,
                             krb_principal=principal,
                             proxyuser=proxy_user)

    args = {'target': koji_target}
    # Original code built args with poll_interval and then rebuilt the same
    # target-only dict when use_args was False; express that intent directly.
    if use_args and poll_interval is not None:
        args['poll_interval'] = poll_interval

    return (MockEnv(workflow).for_plugin(KojiTagBuildPlugin.key,
                                         args=args).create_runner())
# --- Example 5 ---
def test_get_built_images_multiple_manifest_types(workflow):
    """get_built_images must fail when a tag resolves to more than one digest."""
    MockEnv(workflow).set_check_platforms_result(["x86_64"])
    workflow.data.tag_conf.add_unique_image(UNIQUE_IMAGE)

    # This test never exercises ManifestUtil; neuter its constructor.
    flexmock(ManifestUtil).should_receive("__init__")

    arch_image = ImageName.parse(f"{UNIQUE_IMAGE}-x86_64")
    both_digests = ManifestDigest(
        {"v2": make_digest("foo"), "oci": make_digest("bar")}
    )
    (flexmock(RegistryClient)
     .should_receive("get_manifest_digests")
     .with_args(arch_image, versions=("v2", "oci"))
     .and_return(both_digests))

    expected_error = (
        f"Expected to find a single manifest digest for {UNIQUE_IMAGE}-x86_64, "
        "but found multiple: {'v2': .*, 'oci': .*}"
    )

    plugin = GroupManifestsPlugin(workflow)
    session = RegistrySession(REGISTRY_V2)

    with pytest.raises(RuntimeError, match=expected_error):
        plugin.get_built_images(session)
def mock_env(workflow, r_c_m=None, domains_override=None):
    """Prepare a MockEnv for FetchMavenArtifactsPlugin with a reactor config."""
    config = r_c_m or {
        'version': 1,
        'koji': {
            'hub_url': KOJI_HUB,
            'root_url': KOJI_ROOT,
            'auth': {},
        },
        'pnc': {
            'base_api_url': PNC_ROOT,
            'get_artifact_path': 'artifacts/{}',
        },
    }

    if domains_override:
        # setdefault: an explicit key in a caller-supplied config wins.
        config.setdefault('artifacts_allowed_domains', domains_override)

    env = (MockEnv(workflow)
           .for_plugin(FetchMavenArtifactsPlugin.key)
           .set_reactor_config(config))

    workflow.build_dir.init_build_dirs(["aarch64", "x86_64"], workflow.source)
    return env
def test_failed_build(workflow, source_dir, caplog, user_params):
    """
    Test if proper error state is returned when build inside build
    container failed
    """
    bsi_error = subprocess.CalledProcessError(1, 'cmd', output='stub stdout')
    (flexmock(subprocess)
     .should_receive('check_output')
     .and_raise(bsi_error))

    sources_dir = workflow.build_dir.path / 'some_dir'
    sources_dir.mkdir()

    # All three source directories point at the same stub location.
    fetch_result = {
        key: str(sources_dir)
        for key in ('image_sources_dir', 'remote_sources_dir',
                    'maven_sources_dir')
    }
    runner = (MockEnv(workflow)
              .for_plugin(SourceContainerPlugin.key)
              .set_plugin_result(FetchSourcesPlugin.key, fetch_result)
              .create_runner())

    with pytest.raises(PluginFailedException, match="BSI utility failed"):
        runner.run()
    assert 'BSI failed with output:' in caplog.text
    assert 'stub stdout' in caplog.text
def test_skip_plugin(workflow, caplog):
    """A non-flatpak, non-source build must be skipped with empty results."""
    reactor_config = {
        'registry': {
            'url': LOCALHOST_REGISTRY,
            'insecure': True,
            'auth': {},
        },
    }
    runner = (MockEnv(workflow)
              .for_plugin(TagAndPushPlugin.key)
              .set_reactor_config(reactor_config)
              .create_runner())

    results = runner.run()[TagAndPushPlugin.key]

    assert 'not a flatpak or source build, skipping plugin' in caplog.text
    assert 'pushed_images' in results
    assert 'repositories' in results
    assert not results['pushed_images']

    repositories = results['repositories']
    # Every repository category is present but empty on a skipped build.
    for category in ('primary', 'unique', 'floating'):
        assert category in repositories
        assert not repositories[category]
# --- Example 9 ---
def test_rpmqa_plugin_success(caplog, workflow, build_dir, base_from_scratch,
                              ignore_autogenerated):
    """Plugin must record parsed rpm output for every platform."""
    (flexmock(retries)
     .should_receive("run_cmd")
     .replace_with(mock_oc_image_extract))

    # One temp name and one rpm query per platform (4 platforms below).
    (flexmock(_RandomNameSequence)
     .should_receive("__next__")
     .times(4)
     .and_return('abcdef12'))

    (flexmock(subprocess)
     .should_receive("check_output")
     .times(4)
     .and_return("\n".join(PACKAGE_LIST_WITH_AUTOGENERATED)))

    platforms = ['x86_64', 's390x', 'ppc64le', 'aarch64']
    workflow.build_dir.init_build_dirs(platforms, workflow.source)
    workflow.data.tag_conf.add_unique_image(f'registry.com/{TEST_IMAGE}')

    plugin_args = {
        "ignore_autogenerated_gpg_keys": ignore_autogenerated["ignore"],
    }
    env = (MockEnv(workflow)
           .for_plugin(RPMqaPlugin.key)
           .set_plugin_args(plugin_args)
           .set_dockerfile_images(['scratch'] if base_from_scratch else []))
    env.create_runner().run()

    for platform in platforms:
        assert workflow.data.image_components[platform] == parse_rpm_output(
            ignore_autogenerated["package_list"])
def test_include_koji(workflow, build_dir, caplog, parent_images,
                      base_from_scratch, target, expect_success, root,
                      koji_ssl_certs, expected_string, proxy):
    """Exercise InjectYumReposPlugin with the koji repo enabled.

    Covers: a target without build info, from-scratch builds (plugin skips),
    koji SSL cert placement, and the generated .repo file contents.
    """
    prepare(workflow,
            build_dir,
            include_koji_repo=True,
            koji_ssl_certs=koji_ssl_certs,
            yum_proxy=proxy,
            root_url=root)
    dockerfile_images = []
    if parent_images:
        dockerfile_images.append('parent_image:latest')
    if base_from_scratch:
        dockerfile_images.append('scratch')
    workflow.data.dockerfile_images = DockerfileImages(dockerfile_images)

    runner = (MockEnv(workflow).for_plugin(InjectYumReposPlugin.key,
                                           args={
                                               'target': target
                                           }).create_runner())

    # A target without build info fails the run, but only when there are
    # parent images; otherwise the plugin skips before resolving the target.
    if target == KOJI_TARGET_NO_INFO and parent_images:
        with pytest.raises(PluginFailedException) as exc:
            runner.run()
        assert f"Provided target '{target}' doesn't exist!" in str(exc.value)
        assert f"provided target '{target}' doesn't exist" in caplog.text
    else:
        runner.run()

    if not parent_images:
        log_msg = "Skipping plugin, from scratch stage(s) can't add repos"
        assert log_msg in caplog.text
        return
    if not expect_success:
        return

    if proxy:
        assert f"Setting yum proxy to {proxy}" in caplog.text

    # When SSL certs were configured the plugin must have written them out.
    if koji_ssl_certs:
        for file_path, expected in [(workflow.koji_session.cert_path, 'cert'),
                                    (workflow.koji_session.serverca_path,
                                     'serverca')]:
            assert os.path.isfile(file_path)
            with open(file_path, 'r') as fd:
                assert fd.read() == expected

    # Exactly one generated repo file, named 'target-<5 chars>.repo'.
    repos_path = workflow.build_dir.any_platform.path / RELATIVE_REPOS_PATH
    repofile = 'target-?????.repo'
    files = os.listdir(repos_path)
    assert len(files) == 1
    assert fnmatch(next(iter(files)), repofile)
    with open(repos_path / files[0], 'r') as f:
        content = f.read()
    assert content.startswith("[atomic-reactor-koji-plugin-target]\n")
    assert "gpgcheck=0\n" in content
    assert "enabled=1\n" in content
    assert "name=atomic-reactor-koji-plugin-target\n" in content
    assert "baseurl=%s/repos/tag/2/$basearch\n" % root in content

    if proxy:
        assert "proxy=%s" % proxy in content

    if expected_string:
        assert expected_string in content
# --- Example 11 ---
def test_flatpak_create_oci(workflow, config_name, flatpak_metadata, breakage):
    """End-to-end check of FlatpakCreateOciPlugin against real flatpak/skopeo.

    Depending on *breakage*, either asserts the expected failure message or
    verifies the produced OCI metadata, labels/annotations and flatpak
    contents for the app/runtime/sdk config selected by *config_name*.
    """
    # Check that we actually have flatpak available
    have_flatpak = False
    try:
        output = subprocess.check_output(['flatpak', '--version'],
                                         universal_newlines=True)
        m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
        if m and (int(m.group(1)), int(m.group(2)), int(
                m.group(3))) >= (0, 9, 7):
            have_flatpak = True

    except (subprocess.CalledProcessError, OSError):
        # NOTE: pytest.skip() no longer accepts the 'msg' keyword (removed in
        # pytest 7); pass the reason positionally, which works on all versions.
        pytest.skip('flatpak not available')

    if not have_flatpak:
        return

    # Check if we have skopeo
    try:
        subprocess.check_output(['skopeo', '--version'])
    except (subprocess.CalledProcessError, OSError):
        pytest.skip('skopeo not available')

    config = CONFIGS[config_name]

    runner = (MockEnv(workflow).for_plugin(
        FlatpakCreateOciPlugin.key).set_reactor_config({
            'flatpak': {
                'metadata': flatpak_metadata
            }
        }).create_runner())

    platforms = ['x86_64', 'aarch64', 's390x', 'ppc64le']
    workflow.user_params['flatpak'] = True
    write_docker_file(config, workflow.source.path)
    workflow.build_dir.init_build_dirs(platforms, workflow.source)

    mock_extract_filesystem_call = functools.partial(mock_extract_filesystem,
                                                     config)
    (flexmock(ImageUtil).should_receive(
        'extract_filesystem_layer').replace_with(mock_extract_filesystem_call))

    # Create empty placeholder image files for every platform.
    for image_platform in platforms:
        image_path = workflow.build_dir.platform_dir(
            image_platform).exported_squashed_image
        os.mknod(image_path)

    if breakage == 'no_runtime':
        # Copy the parts of the config we are going to change
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])

        module_config = config['modules']['eog']

        # Strip the module's dependencies so no runtime can be identified.
        mmd = Modulemd.ModuleStream.read_string(module_config['metadata'],
                                                strict=True)
        mmd.clear_dependencies()
        mmd.add_dependencies(Modulemd.Dependencies())
        mmd_index = Modulemd.ModuleIndex.new()
        mmd_index.add_module_stream(mmd)
        module_config['metadata'] = mmd_index.dump_to_string()

        expected_exception = 'Failed to identify runtime module'
    elif breakage == 'run_cmd_failed':
        for platform in platforms:
            platform_dir = workflow.build_dir.platform_dir(platform)
            skopeo_cmd = [
                'skopeo', 'copy', f'oci:{str(platform_dir.path)}'
                f'/flatpak-oci-image:app/org.gnome.eog/x86_64/stable',
                '--format=v2s2',
                f'docker-archive:{platform_dir.exported_squashed_image}'
            ]
            flexmock(retries).should_receive('run_cmd').with_args(
                skopeo_cmd).and_raise(
                    subprocess.CalledProcessError(
                        1, ["skopeo", "..."], output=b'something went wrong'))
        expected_exception = 'skopeo copy failed with output:'
    else:
        assert breakage is None
        expected_exception = None

    setup_flatpak_composes(workflow)
    source = setup_flatpak_source_info(config)
    (flexmock(FlatpakUtil).should_receive(
        'get_flatpak_source_info').and_return(source))

    if expected_exception:
        with pytest.raises(PluginFailedException, match=expected_exception):
            runner.run()
    else:
        # Compute the component list we expect the plugin to record, using
        # the same FlatpakBuilder machinery the plugin itself uses.
        builder = FlatpakBuilder(source,
                                 workflow.build_dir.any_platform.path,
                                 'var/tmp/flatpak-build',
                                 parse_manifest=parse_rpm_output,
                                 flatpak_metadata=FLATPAK_METADATA_ANNOTATIONS)
        with NamedTemporaryFile(dir=workflow.build_dir.any_platform.path) as f:
            f.write(
                config['filesystem_contents']['/var/tmp/flatpak-build.rpm_qf'])
            f.flush()
            expected_components = builder.get_components(f.name)
        results = runner.run()
        x86_64_results = results[FlatpakCreateOciPlugin.key][platforms[0]]
        dir_metadata = x86_64_results
        assert workflow.data.image_components[
            platforms[0]] == expected_components
        assert dir_metadata['type'] == IMAGE_TYPE_OCI
        for image_platform in platforms:
            image_path = workflow.build_dir.platform_dir(
                image_platform).exported_squashed_image
            assert image_path.exists()

        # Check that the correct labels and annotations were written

        labels, annotations = load_labels_and_annotations(dir_metadata)

        if config_name == 'app':
            assert labels['name'] == 'eog'
            assert labels['com.redhat.component'] == 'eog'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170629213428'
        elif config_name == 'runtime':  # runtime
            assert labels['name'] == 'flatpak-runtime'
            assert labels[
                'com.redhat.component'] == 'flatpak-runtime-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'
        else:
            assert labels['name'] == 'flatpak-sdk'
            assert labels['com.redhat.component'] == 'flatpak-sdk-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'

        if flatpak_metadata == 'annotations':
            assert annotations.get(
                'org.flatpak.ref') == config['expected_ref_name']
            assert 'org.flatpak.ref' not in labels
        elif flatpak_metadata == 'labels':
            assert 'org.flatpak.ref' not in annotations
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']
        elif flatpak_metadata == 'both':
            assert annotations.get(
                'org.flatpak.ref') == config['expected_ref_name']
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']

        # Check that the expected files ended up in the flatpak

        # Flatpak versions before 1.6 require annotations to be present, and Flatpak
        # versions 1.6 and later require labels to be present. Skip the remaining
        # checks unless we have both annotations and labels.
        if flatpak_metadata != 'both':
            return

        inspector = DefaultInspector(str(workflow.build_dir.any_platform.path),
                                     dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        metadata_lines = inspector.cat_file('/metadata').split('\n')
        assert any(
            re.match(r'runtime=org.fedoraproject.Platform/.*/f28$', line)
            for line in metadata_lines)
        assert any(
            re.match(r'sdk=org.fedoraproject.Sdk/.*/f28$', line)
            for line in metadata_lines)

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file(
                '/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines

            assert 'name=org.gnome.eog' in metadata_lines
            assert 'tags=Viewer' in metadata_lines
            assert 'command=eog2' in metadata_lines
        elif config_name == 'runtime':  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'

            assert 'name=org.fedoraproject.Platform' in metadata_lines
        else:  # SDK
            assert 'name=org.fedoraproject.Sdk' in metadata_lines
# --- Example 12 ---
def mock_env(
    workflow,
    *,
    df_content: str,
    base_inspect: ImageInspectionData,
    # (dict[str, str] | str | None), but some tests intentionally pass the wrong type
    labels_plugin_arg: Optional[Any] = None,
    labels_reactor_conf: Optional[Dict[str, str]] = None,
    # list[list[str]], but same as above
    eq_conf: Optional[Any] = None,
    dont_overwrite: Optional[List[str]] = None,
    dont_overwrite_if_in_dockerfile: Optional[List[str]] = None,
    aliases: Optional[Dict[str, str]] = None,
    auto_labels: Optional[List[str]] = None,
) -> MockEnv:
    """Prepare a MockEnv for AddLabelsPlugin.

    Writes *df_content* as the source Dockerfile, wires the plugin args,
    mocks OSBS build lookup and osbs-client configuration, installs the
    reactor config (optionally with image_labels / image_equal_labels),
    and mocks base-image inspection and VCS info on the workflow.
    """

    env = MockEnv(workflow).for_plugin(AddLabelsPlugin.key)

    args = {
        'labels': labels_plugin_arg,
        'dont_overwrite': dont_overwrite,
        'auto_labels': auto_labels,
        'aliases': aliases,
        'equal_labels': eq_conf,
    }
    # Only pass this arg when explicitly given, so the plugin default applies.
    if dont_overwrite_if_in_dockerfile is not None:
        args[
            'dont_overwrite_if_in_dockerfile'] = dont_overwrite_if_in_dockerfile

    env.set_plugin_args(args)

    df_path = Path(workflow.source.path) / "Dockerfile"
    df_path.write_text(df_content)

    def get_build(pipeline_run_name):
        # Fixed start time so test output is deterministic.
        start_time_json = {'status': {'startTime': TIME}}
        return start_time_json

    flexmock(OSBS, get_build=get_build)
    config_kwargs = {
        'namespace': workflow.namespace,
        'verify_ssl': True,
        'openshift_url': 'http://example.com/',
        'use_auth': True,
        'conf_file': None,
    }
    (flexmock(osbs.conf.Configuration).should_call("__init__").with_args(
        **config_kwargs))

    openshift_map = {
        'url': 'http://example.com/',
        'insecure': False,
        'auth': {
            'enable': True
        },
    }

    rcm = {'version': 1, 'openshift': openshift_map}
    if labels_reactor_conf is not None:
        # Deep copy keeps the caller's dict isolated from the reactor config.
        rcm["image_labels"] = deepcopy(labels_reactor_conf)
    if eq_conf is not None:
        rcm["image_equal_labels"] = eq_conf
    workflow.conf.conf = rcm

    workflow.build_dir.init_build_dirs(["aarch64", "x86_64"], workflow.source)

    env.set_dockerfile_images(
        workflow.build_dir.any_platform.dockerfile.parent_images)

    flexmock(workflow.imageutil).should_receive(
        'base_image_inspect').and_return(base_inspect)
    flexmock(workflow.source).should_receive('get_vcs_info').and_return(
        VcsInfo(vcs_type="git",
                vcs_url=DOCKERFILE_GIT,
                vcs_ref=DOCKERFILE_SHA1))

    return env
# --- Example 13 ---
def mock_env(
    workflow,
    *,
    df_content: str,
    help_md: Optional[HelpMdFile] = None,
    plugin_args: Optional[Dict[str, str]] = None,
    mock_md2man: Callable[..., MockedPopen] = mock_md2man_success("man file content"),
) -> PluginsRunner:
    """Prepare a runner for AddHelpPlugin.

    Writes *df_content* (and the optional *help_md* file) into the source
    dir, mocks OSBS/osbs-client configuration, and intercepts the md2man
    subprocess: when a help file is present, Popen is expected exactly once
    with the right input/output paths; otherwise it must never be called.
    """

    def get_build(pipeline_run_name):
        # Fixed start time so test output is deterministic.
        start_time_json = {'status': {'startTime': TIME}}
        return start_time_json

    flexmock(OSBS, get_build=get_build)
    config_kwargs = {
        'namespace': workflow.namespace,
        'verify_ssl': True,
        'openshift_url': 'http://example.com/',
        'use_auth': True,
        'conf_file': None,
    }
    (flexmock(osbs.conf.Configuration)
     .should_call("__init__")
     .with_args(**config_kwargs))

    openshift_map = {
        'url': 'http://example.com/',
        'insecure': False,
        'auth': {'enable': True},
    }

    rcm = {'version': 1, 'openshift': openshift_map}
    workflow.conf.conf = rcm

    env = MockEnv(workflow).for_plugin(AddHelpPlugin.key, plugin_args)
    source_dir = Path(workflow.source.path)

    (source_dir / "Dockerfile").write_text(df_content)
    if help_md:
        (source_dir / help_md.name).write_text(help_md.content)

    workflow.build_dir.init_build_dirs(["aarch64", "x86_64"], workflow.source)

    if help_md:
        def check_popen(*args, **kwargs):
            # Validate the md2man command line before delegating to the
            # caller-supplied mock (success or failure variant).
            cmd = args[0]
            in_path, out_path = parse_md2man_args(cmd)

            assert in_path.name == help_md.name
            assert out_path.name == AddHelpPlugin.man_filename

            return mock_md2man(*args, **kwargs)

        (flexmock(subprocess)
         .should_receive("Popen")
         .once()
         .replace_with(check_popen))
    else:
        (flexmock(subprocess)
         .should_receive("Popen")
         .never())

    return env.create_runner()
def test_metadata_plugin_source(image_id, verify_media_results, expected_media_results, workflow):
    """Verify StoreMetadataPlugin annotations for a source container build."""
    sources_for_nvr = 'image_build'
    sources_for_koji_build_id = '12345'

    fetch_sources_result = {
        'sources_for_koji_build_id': sources_for_koji_build_id,
        'sources_for_nvr': sources_for_nvr,
        'image_sources_dir': 'source_dir',
    }

    env = (MockEnv(workflow)
           .for_plugin(StoreMetadataPlugin.key)
           .set_plugin_args({"url": "http://example.com/"})
           .set_plugin_result(PLUGIN_FETCH_SOURCES_KEY, fetch_sources_result)
           .set_plugin_result(VerifyMediaTypesPlugin.key, verify_media_results))
    prepare(workflow)
    # The image id, when given, is exposed via the koji source manifest.
    if image_id:
        workflow.data.koji_source_manifest = {'config': {'digest': image_id}}

    workflow.fs_watcher._data = dict(fs_data=None)

    # Fake per-plugin timing data so the plugins-metadata annotation is filled.
    initial_timestamp = datetime.now()
    timestamp = (initial_timestamp + timedelta(seconds=3)).isoformat()
    workflow.data.plugins_timestamps = {
        PLUGIN_FETCH_SOURCES_KEY: timestamp,
    }
    workflow.data.plugins_durations = {
        PLUGIN_FETCH_SOURCES_KEY: 3.03,
    }
    workflow.data.plugins_errors = {}

    output = env.create_runner().run()

    assert StoreMetadataPlugin.key in output
    annotations = output[StoreMetadataPlugin.key]["annotations"]
    assert "filesystem" in annotations
    assert "fs_data" in annotations['filesystem']
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])
    # Falls back to an empty string when no source manifest digest was set.
    assert annotations['image-id'] == (image_id if image_id else '')
    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    expected = [{
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }]
    # Set equality without requiring any particular ordering of the digests.
    assert all(digest in expected for digest in digests)
    assert all(digest in digests for digest in expected)

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert PLUGIN_FETCH_SOURCES_KEY in plugins_metadata["durations"]

    if expected_media_results:
        media_types = expected_media_results
        assert sorted(json.loads(annotations['media-types'])) == sorted(list(set(media_types)))
    else:
        assert 'media-types' not in annotations
def test_skip_plugin(workflow, build_dir, caplog):
    """Plugin must log a skip message when the build is not a flatpak."""
    workflow = mock_workflow(workflow, build_dir, "", user_params={})
    runner = MockEnv(workflow).for_plugin(FlatpakUpdateDockerfilePlugin.key).create_runner()
    runner.run()
    assert 'not flatpak build, skipping plugin' in caplog.text
# --- Example 16 ---
def test_tag_and_push_plugin(workflow, monkeypatch, caplog, image_name,
                             should_raise, missing_v2, use_secret, file_name,
                             dockerconfig_contents):
    """Exercise TagAndPushPlugin for a flatpak build pushed with skopeo.

    Mocks the registry's manifest endpoints (v1/v2/list/unknown) and the
    skopeo invocation; checks both the failure path (push command fails)
    and the success path (repositories annotation is populated).
    """
    workflow.user_params['flatpak'] = True
    platforms = ['x86_64', 'ppc64le', 's390x', 'aarch64']
    workflow.data.tag_conf.add_unique_image(ImageName.parse(image_name))
    workflow.build_dir.init_build_dirs(platforms, workflow.source)

    # Optionally write a docker config secret the registry auth will use.
    secret_path = None
    if use_secret:
        temp_dir = mkdtemp()
        with open(os.path.join(temp_dir, file_name), "w+") as dockerconfig:
            dockerconfig.write(json.dumps(dockerconfig_contents))
            dockerconfig.flush()
            secret_path = temp_dir

    # Add a mock OCI image to 'flatpak_create_oci' results; this forces the tag_and_push
    # plugin to push with skopeo
    flatpak_create_oci_result: Dict[str, Any] = {}
    # Since we are always mocking the push for now, we can get away with a stub image
    for current_platform in platforms:
        metadata = deepcopy(IMAGE_METADATA_OCI)
        metadata['ref_name'] = f'app/org.gnome.eog/{current_platform}/master'
        flatpak_create_oci_result[current_platform] = metadata

    manifest_latest_url = "https://{}/v2/{}/manifests/latest".format(
        LOCALHOST_REGISTRY, TEST_IMAGE)
    manifest_url = "https://{}/v2/{}/manifests/{}".format(
        LOCALHOST_REGISTRY, TEST_IMAGE, DIGEST_V2)

    # We return our v2 manifest in the mocked v1 response as a placeholder - only the
    # digest matters anyways
    manifest_response_v1 = requests.Response()
    (flexmock(manifest_response_v1,
              status_code=200,
              headers={
                  'Content-Type':
                  'application/vnd.docker.distribution.manifest.v1+json',
                  'Docker-Content-Digest': DIGEST_V1
              }))

    manifest_response_v2 = requests.Response()
    (flexmock(manifest_response_v2,
              status_code=200,
              headers={
                  'Content-Type':
                  'application/vnd.docker.distribution.manifest.v2+json',
                  'Docker-Content-Digest': DIGEST_V2
              }))
    manifest_response_v2_list = requests.Response()
    (flexmock(manifest_response_v2_list,
              raise_for_status=lambda: None,
              headers={
                  'Content-Type':
                  'application/vnd.docker.distribution.manifest.list.v2+json',
              }))
    # The skopeo push itself is mocked: either it fails (should_raise) or
    # succeeds unconditionally.
    if should_raise:
        (flexmock(retries).should_receive('run_cmd').and_raise(
            subprocess.CalledProcessError(1,
                                          'echo',
                                          output=b'something went wrong')))
    else:
        (flexmock(retries).should_receive('run_cmd').and_return(0))

    manifest_unknown_response = requests.Response()
    (flexmock(manifest_unknown_response,
              status_code=404,
              json={"errors": [{
                  "code": "MANIFEST_UNKNOWN"
              }]}))

    def custom_get(method, url, headers, **kwargs):
        # Dispatch mocked registry responses based on URL and Accept header.
        if url.startswith(manifest_latest_url):
            # For a manifest stored as v2 or v1, the docker registry defaults to
            # returning a v1 manifest if a v2 manifest is not explicitly requested
            if headers[
                    'Accept'] == 'application/vnd.docker.distribution.manifest.v2+json':
                if missing_v2:
                    return manifest_unknown_response
                else:
                    return manifest_response_v2
            elif headers[
                    'Accept'] == 'application/vnd.docker.distribution.manifest.list.v2+json':
                return manifest_response_v2_list
            else:
                return manifest_response_v1

        if url == manifest_url:
            if missing_v2:
                return manifest_unknown_response
            else:
                return manifest_response_v2

    mock_get_retry_session()
    (flexmock(
        requests.Session).should_receive('request').replace_with(custom_get))
    (flexmock(time).should_receive('sleep').and_return(None))

    reactor_config = {
        'registries': [{
            'url': LOCALHOST_REGISTRY,
            'insecure': True,
            'auth': {
                'cfg_path': secret_path
            },
        }]
    }
    runner = (MockEnv(workflow).for_plugin(
        TagAndPushPlugin.key).set_reactor_config(
            reactor_config).set_plugin_result(
                CheckAndSetPlatformsPlugin.key, platforms).set_plugin_result(
                    FlatpakCreateOciPlugin.key,
                    flatpak_create_oci_result).create_runner())
    add_koji_map_in_workflow(workflow, hub_url='', root_url='')

    if should_raise:
        with pytest.raises(PluginFailedException):
            runner.run()
    else:
        runner.run()
        assert workflow.conf.registry
        repos_annotations = get_repositories_annotations(
            workflow.data.tag_conf)
        assert workflow.data.annotations['repositories'] == repos_annotations

        if MOCK:
            # we only test this when mocking docker because we don't expect
            # running actual docker against v2 registry
            if missing_v2:
                assert "Retrying push because V2 schema 2" in caplog.text
# --- Example 17 ---
def test_group_manifests(workflow, source_dir, schema_version, test_name, group, foreign_layers,
                         per_platform_images, expected_exception, user_params):
    """Run GroupManifestsPlugin against mocked registries and verify its output.

    When ``group`` is truthy, a manifest list must be pushed for each test
    image and every per-platform manifest must be reachable in the target
    registry.  When ``group`` is falsy, the single platform's manifest is
    tagged directly.  When ``expected_exception`` is set, the plugin run
    must fail with that message instead.
    """
    test_images = ['namespace/httpd:2.4',
                   'namespace/httpd:latest']

    # platform name -> GOARCH architecture string expected in manifest entries
    goarch = {
        'ppc64le': 'powerpc',
        'x86_64': 'amd64',
    }

    registry_conf = {REGISTRY_V2: {'version': 'v2', 'insecure': True}}

    # Write a .dockercfg secret so the plugin can authenticate to the registry.
    temp_dir = mkdtemp(dir=str(source_dir))
    with open(os.path.join(temp_dir, ".dockercfg"), "w+") as dockerconfig:
        dockerconfig_contents = {
            REGISTRY_V2: {
                "username": "******", "password": DOCKER0_REGISTRY
            }
        }
        dockerconfig.write(json.dumps(dockerconfig_contents))
        dockerconfig.flush()
        registry_conf[REGISTRY_V2]['secret'] = temp_dir

    registry_images_conf = {
        platform: {REGISTRY_V2: images} for platform, images in per_platform_images.items()
    }

    mocked_registries, platform_digests = mock_registries(registry_conf, registry_images_conf,
                                                          schema_version=schema_version,
                                                          foreign_layers=foreign_layers)

    some_per_platform_image = next(
        image for images in per_platform_images.values() for image in images
    )
    # NOTE: this test assumes that all the images in per_platform_images follow the format of
    #   {noarch_image}-{platform}. If they don't, this test will fail with cryptic errors
    noarch_image, *_ = some_per_platform_image.rsplit("-", 1)
    mock_environment(workflow, unique_image=noarch_image, primary_images=test_images)

    # Build the platform_descriptors reactor-config entries from the goarch map.
    platform_descriptors_list = []
    for platform, arch in goarch.items():
        new_plat = {
            'platform': platform,
            'architecture': arch,
        }
        platform_descriptors_list.append(new_plat)

    runner = (
        MockEnv(workflow)
        .for_plugin(GroupManifestsPlugin.key)
        .set_check_platforms_result(list(per_platform_images.keys()))
        .set_reactor_config(
            {
                'version': 1,
                'group_manifests': group,
                'registry': {
                    'url': f'https://{REGISTRY_V2}/{registry_conf[REGISTRY_V2]["version"]}',
                    'auth': True,
                },
                'registries_cfg_path': str(temp_dir),
                'platform_descriptors': platform_descriptors_list,
            }
        )
        .create_runner()
    )

    if expected_exception is None:
        results = runner.run()

        # Expected media types for both individual manifests and the list/index,
        # keyed by the schema version under test.
        manifest_type, list_type = {
            'v2': (
                'application/vnd.docker.distribution.manifest.v2+json',
                'application/vnd.docker.distribution.manifest.list.v2+json',
            ),
            'oci': (
                'application/vnd.oci.image.manifest.v1+json',
                'application/vnd.oci.image.index.v1+json',
            ),
        }[schema_version]

        def verify_manifest_in_repository(registry, repo, manifest, platform, tag=None):
            """Assert that a platform's config/layer blobs and manifest exist in repo."""
            config = 'config-' + platform
            assert registry.get_blob(repo, make_digest(config)) == config
            layer = 'layer-' + platform
            assert registry.get_blob(repo, make_digest(layer)) == layer
            assert registry.get_manifest(repo, make_digest(manifest)) == manifest
            if tag is not None:
                assert registry.get_manifest(repo, tag) == manifest

        if group:
            # Collect the source build digest and manifest for every platform.
            source_builds = {}
            source_manifests = {}

            for platform in per_platform_images:
                build = platform_digests[platform]['digests'][0]
                source_builds[platform] = build
                source_registry = mocked_registries[build['registry']]
                source_manifests[platform] = source_registry.get_manifest(build['repository'],
                                                                          build['digest'])

            for registry, conf in registry_conf.items():
                target_registry = mocked_registries[registry]
                for image in test_images:
                    name, tag = image.split(':')

                    if tag not in target_registry.get_repo(name)['tags']:
                        continue

                    raw_manifest_list = to_text(target_registry.get_manifest(name, tag))
                    manifest_list = json.loads(raw_manifest_list, object_pairs_hook=OrderedDict)

                    # Check if the manifest list is sorted
                    assert json.dumps(manifest_list, indent=4, sort_keys=True,
                                      separators=(',', ': ')) == raw_manifest_list
                    arch_list = [m['platform']['architecture'] for m in manifest_list['manifests']]
                    assert arch_list == sorted(arch_list)

                    assert manifest_list['mediaType'] == list_type
                    assert manifest_list['schemaVersion'] == 2

                    manifests = manifest_list['manifests']
                    assert all(d['mediaType'] == manifest_type for d in manifests)
                    assert all(d['platform']['os'] == 'linux' for d in manifests)

                    for platform in platform_digests:
                        # Exactly one manifest entry per platform, matching the
                        # source build's digest.
                        descs = [d for d in manifests
                                 if d['platform']['architecture'] == goarch[platform]]
                        assert len(descs) == 1
                        assert descs[0]['digest'] == source_builds[platform]['digest']

                        verify_manifest_in_repository(target_registry, name,
                                                      source_manifests[platform], platform)

        else:
            # Non-grouped case: only a single platform may be present.
            platforms = list(platform_digests)
            assert len(platforms) == 1
            platform = platforms[0]

            source_build = platform_digests[platform]['digests'][0]
            source_registry = mocked_registries[source_build['registry']]
            source_manifest = source_registry.get_manifest(source_build['repository'],
                                                           source_build['digest'])

            for registry, conf in registry_conf.items():
                if conf['version'] == 'v1':
                    continue

                target_registry = mocked_registries[registry]
                # Primary images must be tagged with the platform manifest;
                # floating tags must not be pushed in the non-grouped case.
                for image in get_primary_images(workflow):
                    repo = image.to_str(registry=False, tag=False)
                    if image.tag not in target_registry.get_repo(repo)['tags']:
                        continue
                    verify_manifest_in_repository(target_registry, repo,
                                                  source_manifest, platform,
                                                  image.tag)
                for image in get_floating_images(workflow):
                    repo = image.to_str(registry=False, tag=False)
                    assert image.tag not in target_registry.get_repo(repo)['tags']

        # Check that plugin returns ManifestDigest object
        plugin_results = results[GroupManifestsPlugin.key]

        result_digest = plugin_results["manifest_digest"]
        assert isinstance(result_digest, ManifestDigest)
        assert plugin_results["media_type"]
        assert plugin_results["manifest"]

    else:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex.value)
def test_running_build(workflow, caplog, sources_dir, sources_dir_exists,
                       sources_dir_empty, remote_dir, remote_dir_exists,
                       remote_dir_empty, maven_dir, maven_dir_exists,
                       maven_dir_empty, export_failed):
    """
    Run SourceContainerPlugin with every combination of present/absent/empty
    SRPM, remote-source and maven-source directories, mocking the bsi and
    skopeo invocations, and verify the plugin's result, its log output, and
    the cleanup messages it emits.  ``export_failed`` simulates the skopeo
    copy step failing.
    """
    # Create (or skip creating) each source directory, optionally with stub
    # content, according to the parametrized flags.
    build_sources_dir = workflow.build_dir.source_container_sources_dir
    sources_dir_path = build_sources_dir / sources_dir
    if sources_dir_exists:
        sources_dir_path.mkdir()
        if not sources_dir_empty:
            os.mknod(sources_dir_path / 'stub.srpm')

    remote_dir_path = build_sources_dir / remote_dir
    if remote_dir_exists:
        remote_dir_path.mkdir()
        if not remote_dir_empty:
            os.mknod(remote_dir_path / 'remote-sources-first.tar.gz')
            os.mknod(remote_dir_path / 'remote-sources-second.tar.gz')

    maven_dir_path = build_sources_dir / maven_dir
    if maven_dir_exists:
        maven_dir_path.mkdir()
        if not maven_dir_empty:
            os.mkdir(maven_dir_path / 'maven-sources-1')
            os.mknod(maven_dir_path / 'maven-sources-1' /
                     'maven-sources-1.tar.gz')

    workflow.build_dir.init_build_dirs(["noarch"], workflow.source)

    # Feed the directory paths to the plugin as the (mocked) result of the
    # fetch-sources plugin.
    fetch_sources_result = {
        'image_sources_dir': str(sources_dir_path),
        'remote_sources_dir': str(remote_dir_path),
        'maven_sources_dir': str(maven_dir_path),
    }
    runner = (MockEnv(workflow).for_plugin(
        SourceContainerPlugin.key).set_plugin_result(
            FetchSourcesPlugin.key, fetch_sources_result).create_runner())

    # mkdtemp is mocked to return the OCI output dir first, then the image
    # export dir — the order of these and_return calls matters.
    temp_image_output_dir = workflow.build_dir.source_container_output_dir
    exported_image_file = workflow.build_dir.any_platform.exported_squashed_image
    temp_image_export_dir = exported_image_file.parent
    tempfile_chain = (flexmock(tempfile).should_receive("mkdtemp").and_return(
        str(temp_image_output_dir)))
    tempfile_chain.and_return(str(temp_image_export_dir))
    temp_image_export_dir.mkdir(parents=True, exist_ok=True)
    temp_image_output_dir.joinpath('blobs', 'sha256').mkdir(parents=True,
                                                            exist_ok=True)
    # temp dir created by bsi
    flexmock(os).should_receive('getcwd').and_return(
        str(workflow.build_dir.path))
    temp_bsi_dir = workflow.build_dir.path / 'SrcImg'
    temp_bsi_dir.mkdir()

    def check_run_skopeo(args):
        """Mocked call to skopeo"""
        assert args[0] == 'skopeo'
        assert args[1] == 'copy'
        assert args[2] == 'oci:%s' % temp_image_output_dir
        assert args[3] == f'docker-archive:{exported_image_file}'

        # Simulate the export step failing when requested.
        if export_failed:
            raise subprocess.CalledProcessError(returncode=1,
                                                cmd=args,
                                                output="Failed")

        return ''

    def check_check_output(args, **kwargs):
        """Mocked check_output call for bsi"""
        # Reconstruct the exact bsi command line we expect, based on which
        # source directories exist, and compare it to the actual call.
        args_expect = ['bsi', '-d']
        drivers = set()
        if sources_dir and sources_dir_exists:
            drivers.add('sourcedriver_rpm_dir')
        if remote_dir and remote_dir_exists:
            drivers.add('sourcedriver_extra_src_dir')
        if maven_dir and maven_dir_exists:
            drivers.add('sourcedriver_extra_src_dir')
        args_expect.append(','.join(drivers))

        if sources_dir and sources_dir_exists:
            args_expect.append('-s')
            args_expect.append(str(sources_dir_path))
        if remote_dir and remote_dir_exists:
            for count in range(len(os.listdir(remote_dir_path))):
                args_expect.append('-e')
                args_expect.append(
                    str(remote_dir_path / f"remote_source_{count}"))
        if maven_dir and maven_dir_exists:
            for maven_subdir in os.listdir(maven_dir_path):
                args_expect.append('-e')
                args_expect.append(str(maven_dir_path / maven_subdir))
        args_expect.append('-o')
        args_expect.append(str(temp_image_output_dir))

        assert args == args_expect
        return 'stub stdout'

    # skopeo and bsi must each be invoked exactly once iff any source
    # directory exists, otherwise not at all.
    any_sources = any(
        [sources_dir_exists, remote_dir_exists, maven_dir_exists])

    (flexmock(retries).should_receive("run_cmd").times(
        1 if any_sources else 0).replace_with(check_run_skopeo))

    (flexmock(subprocess).should_receive("check_output").times(
        1 if any_sources else 0).replace_with(check_check_output))

    # Minimal OCI layout (index.json + one manifest blob) for the plugin to
    # read back from the output directory.
    blob_sha = "f568c411849e21aa3917973f1c5b120f6b52fe69b1944dfb977bc11bed6fbb6d"
    index_json = {
        "schemaVersion":
        2,
        "manifests": [{
            "mediaType": "application/vnd.oci.image.manifest.v1+json",
            "digest": "sha256:%s" % blob_sha,
            "size": 645,
            "annotations": {
                "org.opencontainers.image.ref.name": "latest-source"
            },
            "platform": {
                "architecture": "amd64",
                "os": "linux"
            }
        }]
    }
    blob_json = {"schemaVersion": 2, "layers": []}
    expected_exported_image_metadata = {}

    temp_image_output_dir.joinpath("index.json").write_text(
        json.dumps(index_json), "utf-8")
    temp_image_output_dir.joinpath("blobs", "sha256", blob_sha).write_text(
        json.dumps(blob_json), "utf-8")

    if not export_failed:
        # Pre-create the docker-archive tarball the mocked skopeo would have
        # produced, so the plugin can compute its metadata.
        export_tar = workflow.build_dir.any_platform.exported_squashed_image
        with open(export_tar, "wb") as f:
            with tarfile.TarFile(mode="w", fileobj=f) as tf:
                for f in os.listdir(temp_image_output_dir):
                    tf.add(str(temp_image_output_dir / f), f)
        expected_exported_image_metadata = get_exported_image_metadata(
            str(export_tar), IMAGE_TYPE_DOCKER_ARCHIVE)

    if not any([sources_dir_exists, remote_dir_exists, maven_dir_exists]):
        # No sources at all: the plugin must fail and report every missing
        # directory both in the exception and in the log.
        with pytest.raises(PluginFailedException) as exc_info:
            runner.run()
        err_msg = exc_info.value.args[0]
        assert re.search("No SRPMs directory", err_msg)
        assert re.search("No Remote source directory", err_msg)
        assert re.search("No Maven source directory", err_msg)

        err_msg = f"No SRPMs directory '{sources_dir_path}' available"
        err_msg += f"\nNo Remote source directory '{remote_dir_path}' available"
        err_msg += f"\nNo Maven source directory '{maven_dir_path}' available"
        # Since Python 3.7 logger adds additional whitespaces by default -> checking without them
        assert re.sub(r'\s+', " ", err_msg) in re.sub(r'\s+', " ", caplog.text)

    elif export_failed:
        with pytest.raises(PluginFailedException):
            runner.run()
    else:
        # Success path: check the plugin result and the per-directory
        # "empty"/"will remove" log messages.
        runner.run()
        result = workflow.data.plugins_results[SourceContainerPlugin.key]
        assert result.keys() == {'image_metadata', 'logs'}
        assert result['logs'] == ['stub stdout']
        assert result['image_metadata'] == expected_exported_image_metadata
        assert 'stub stdout' in caplog.text
        empty_srpm_msg = f"SRPMs directory '{sources_dir_path}' is empty"
        empty_remote_msg = f"Remote source directory '{remote_dir_path}' is empty"
        empty_maven_msg = f"Maven source directory '{maven_dir_path}' is empty"
        if sources_dir_exists and sources_dir_empty:
            assert empty_srpm_msg in caplog.text
        else:
            assert empty_srpm_msg not in caplog.text
        if remote_dir_exists and remote_dir_empty:
            assert empty_remote_msg in caplog.text
        else:
            assert empty_remote_msg not in caplog.text
        if maven_dir_exists and maven_dir_empty:
            assert empty_maven_msg in caplog.text
        else:
            assert empty_maven_msg not in caplog.text

        remove_srpm_msg = f"Will remove directory with downloaded srpms: {sources_dir_path}"
        remove_remote_msg = f"Will remove directory with downloaded remote sources: " \
                            f"{remote_dir_path}"
        remove_maven_msg = f"Will remove directory with downloaded maven sources: " \
                           f"{maven_dir_path}"
        if sources_dir_exists:
            assert remove_srpm_msg in caplog.text
        else:
            assert remove_srpm_msg not in caplog.text
        if remote_dir_exists:
            assert remove_remote_msg in caplog.text
        else:
            assert remove_remote_msg not in caplog.text
        if maven_dir_exists:
            assert remove_maven_msg in caplog.text
        else:
            assert remove_maven_msg not in caplog.text

        remove_unpacked_msg = f"Will remove unpacked image directory: {temp_image_output_dir}"
        assert remove_unpacked_msg in caplog.text

        remove_tmpbsi_msg = f"Will remove BSI temporary directory: {temp_bsi_dir}"
        assert remove_tmpbsi_msg in caplog.text
def test_skip_plugin(workflow, caplog):
    """Scratch builds must cause the plugin to skip itself with a log message."""
    workflow.user_params['scratch'] = True
    env = MockEnv(workflow).for_plugin(CompareComponentsPlugin.key)
    env.set_dockerfile_images(['fedora:36'])
    env.set_scratch(True)
    env.create_runner().run()
    assert 'scratch build, skipping plugin' in caplog.text
def run_plugin(workflow):
    """Run ChangeFromPlugin in isolation and return its entry from the results."""
    runner = MockEnv(workflow).for_plugin(ChangeFromPlugin.key).create_runner()
    results = runner.run()
    return results[ChangeFromPlugin.key]
# Esempio n. 21
    def test_render_mail(self, workflow, source_dir, caplog,
                         manual_cancel, to_koji_submitter,
                         koji_integration, success, has_repositories,
                         has_store_metadata_results, annotations, empty_repositories,
                         expect_error):
        """
        Verify the subject, body and logs produced by
        SendMailPlugin._render_mail for a failed/cancelled build, across the
        parametrized combinations of koji integration, recipient resolution
        and store-metadata annotation availability.
        """
        git_source_url = 'git_source_url'
        git_source_ref = '123423431234123'
        VcsInfo = namedtuple('VcsInfo', ['vcs_type', 'vcs_url', 'vcs_ref'])
        session = MockedClientSession('', has_kerberos=True)
        pathinfo = MockedPathInfo('https://koji')
        # PathInfo.work() raising makes the koji logs URL unavailable,
        # which is why the body later contains "Logs: <not available>".
        (flexmock(pathinfo)
            .should_receive('work')
            .and_raise(RuntimeError, "xyz"))

        flexmock(koji, ClientSession=lambda hub, opts: session, PathInfo=pathinfo)
        kwargs = {
            'url': 'https://something.com',
            'smtp_host': 'smtp.bar.com',
            'from_address': '*****@*****.**',
            'to_koji_submitter': to_koji_submitter,
            'to_koji_pkgowner': False
        }

        workflow.data.plugins_results[KojiImportPlugin.key] = MOCK_KOJI_BUILD_ID
        workflow.user_params['koji_task_id'] = MOCK_KOJI_TASK_ID

        mock_dockerfile(workflow)

        flexmock(workflow.source, get_vcs_info=VcsInfo(vcs_type='git',
                                                       vcs_url=git_source_url,
                                                       vcs_ref=git_source_ref))

        # The build under test always failed; manual_cancel selects whether it
        # was a cancellation.
        MockEnv(workflow).mock_build_outcome(failed=True, cancelled=manual_cancel)

        # Prepare the store-metadata plugin result with/without annotations
        # and with/without a 'repositories' entry, per the parameters.
        if has_store_metadata_results:
            if annotations:
                if empty_repositories:
                    mock_store_metadata_results(workflow, {'repositories': {}})
                else:
                    mock_store_metadata_results(workflow)
                if not has_repositories:
                    result = workflow.data.plugins_results[StoreMetadataPlugin.key]
                    del result['annotations']['repositories']
            else:
                mock_store_metadata_results(workflow, {})

        smtp_map = {
            'from_address': '*****@*****.**',
            'host': 'smtp.bar.com',
            'send_to_submitter': to_koji_submitter,
            'send_to_pkg_owner': False,
        }
        rcm = {'version': 1, 'smtp': smtp_map, 'openshift': {'url': 'https://something.com'}}
        workflow.conf = Configuration(raw_config=rcm)
        add_koji_map_in_workflow(workflow,
                                 hub_url='/' if koji_integration else None,
                                 root_url='https://koji/',
                                 ssl_certs_dir='/certs')

        p = SendMailPlugin(workflow, **kwargs)

        # Submitter is updated in _get_receivers_list
        try:
            p._get_receivers_list()
        except RuntimeError as ex:
            # Only valid exception is a RuntimeError when there are no
            # recipients available
            assert str(ex) == 'No recipients found'

        if expect_error:
            with pytest.raises(ValueError):
                p._render_mail(success, manual_cancel)
            return

        subject, body, logs = p._render_mail(success, manual_cancel)

        # Logs are only attached for failed (non-cancelled) builds.
        if manual_cancel:
            status = 'Canceled'
            assert not logs
        elif success:
            status = 'Succeeded'
            assert not logs
        else:
            status = 'Failed'

        if not has_repositories or empty_repositories:
            exp_subject = '%s building image %s' % (status, MOCK_NAME_LABEL)
            exp_body = [
                'Image Name: ' + MOCK_NAME_LABEL,
                'Repositories: ',
                ]
        else:
            exp_subject = '%s building image foo/bar' % status
            exp_body = [
                'Image Name: foo/bar',
                'Repositories: ',
                '    foo/bar:baz',
                '    foo/bar:spam',
            ]

        result = workflow.data.plugins_results[StoreMetadataPlugin.key]
        if 'repositories' not in result['annotations']:
            assert "repositories is not included in annotations" in caplog.text

        common_body = [
            'Status: ' + status,
            'Submitted by: ',
            'Task id: ' + str(MOCK_KOJI_TASK_ID),
            'Source url: ' + git_source_url,
            'Source ref: ' + git_source_ref,
        ]
        exp_body.extend(common_body)

        # exp_body[-4] is the 'Submitted by: ' line from common_body above.
        if koji_integration and to_koji_submitter:
            exp_body[-4] += MOCK_KOJI_SUBMITTER_EMAIL
        else:
            exp_body[-4] += SendMailPlugin.DEFAULT_SUBMITTER

        if koji_integration:
            # Inserted before the last two lines (Source url / Source ref);
            # "<not available>" because PathInfo.work() was mocked to raise.
            exp_body.insert(-2, "Logs: <not available>")

        assert subject == exp_subject
        assert body == '\n'.join(exp_body)
def test_flatpak_update_dockerfile(workflow, build_dir, config_name, breakage):
    """
    Run FlatpakUpdateDockerfilePlugin for a CONFIGS entry and verify the
    generated Dockerfile (module enable line), the includepkgs and cleanup
    files, and the compose/source info derived from the resolve_composes
    result.  ``breakage`` selects a deliberate misconfiguration and the
    matching expected failure.
    """
    config = CONFIGS[config_name]

    container_yaml = config['container_yaml']

    workflow = mock_workflow(workflow, build_dir, container_yaml)

    # Break the config in the requested way and record the error we expect.
    if breakage == 'branch_mismatch':
        config = deepcopy(config)
        base_module = config['modules'][config['base_module']]
        base_module['metadata'] = base_module['metadata'].replace(
            'branch: f28', 'branch: MISMATCH')

        expected_exception = "Mismatch for 'branch'"
    elif breakage == 'no_compose':
        config = deepcopy(config)
        config['odcs_composes'] = []
        expected_exception = "Can't find main module"
    else:
        assert breakage is None
        expected_exception = None

    mock_koji_session(config)

    # composes run by resolve_composes plugin
    setup_flatpak_composes(workflow, config)

    # ODCS openidc secret consumed via the reactor config below.
    secrets_path = build_dir / "secret"
    secrets_path.mkdir()
    secrets_path.joinpath("token").write_text("green_eggs_and_ham", "utf-8")

    rcm = {
        'version': 1,
        'odcs': {
            'api_url':
            ODCS_URL,
            'auth': {
                'openidc_dir': secrets_path
            },
            'signing_intents': [
                {
                    'name': 'unsigned',
                    'keys': [],
                },
                {
                    'name': 'release',
                    'keys': ['R123', 'R234'],
                },
                {
                    'name': 'beta',
                    'keys': ['R123', 'B456', 'B457'],
                },
            ],
            'default_signing_intent':
            'unsigned'
        },
        'koji': {
            'auth': {},
            'hub_url': 'https://koji.example.com/hub'
        }
    }

    runner = (MockEnv(workflow).for_plugin(
        FlatpakUpdateDockerfilePlugin.key).set_reactor_config(
            rcm).create_runner())

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex.value)
    else:
        runner.run()

        assert os.path.exists(workflow.build_dir.any_platform.dockerfile_path)
        df = workflow.build_dir.any_platform.dockerfile.content

        # The plugin must add a "module enable" line listing the modules.
        m = re.search(r'module enable\s*(.*?)\s*$', df, re.MULTILINE)
        assert m
        enabled_modules = sorted(m.group(1).split())

        if config_name == 'app':
            assert enabled_modules == ['eog:f28', 'flatpak-runtime:f28']
        else:
            assert enabled_modules == ['flatpak-runtime:f28']

        # includepkgs and cleanup.sh helper files must be written next to
        # the Dockerfile.
        includepkgs_path = os.path.join(workflow.build_dir.any_platform.path,
                                        'atomic-reactor-includepkgs')
        assert os.path.exists(includepkgs_path)
        with open(includepkgs_path) as f:
            includepkgs = f.read()
            assert 'librsvg2' in includepkgs
            if config_name == 'app':
                assert 'eog-0:3.28.3-1.module_2123+73a9ef6f.x86_64' in includepkgs

        assert os.path.exists(
            os.path.join(workflow.build_dir.any_platform.path, 'cleanup.sh'))

        # Rebuild the compose/source info from the resolve_composes result
        # and check it matches the test config.
        resolve_comp_result = workflow.data.plugins_results.get(
            PLUGIN_RESOLVE_COMPOSES_KEY)
        flatpak_util = FlatpakUtil(workflow_config=workflow.conf,
                                   source_config=workflow.source.config,
                                   composes=resolve_comp_result['composes'])
        compose_info = flatpak_util.get_flatpak_compose_info()
        assert compose_info.source_spec == config['source_spec']

        if config_name == 'app':
            assert compose_info.main_module.name == 'eog'
            assert compose_info.main_module.stream == 'f28'
            assert compose_info.main_module.version == '20170629213428'
            assert (compose_info.main_module.mmd.get_summary("C") ==
                    'Eye of GNOME Application Module')
            assert compose_info.main_module.rpms == [
                'eog-0:3.28.3-1.module_2123+73a9ef6f.src.rpm',
                'eog-0:3.28.3-1.module_2123+73a9ef6f.x86_64.rpm',
                'eog-0:3.28.3-1.module_2123+73a9ef6f.ppc64le.rpm',
            ]

        source_info = flatpak_util.get_flatpak_source_info()
        assert source_info.base_module.name == config['base_module']
def test_flatpak_create_dockerfile(workflow, source_dir, config_name,
                                   override_base_image, breakage):
    """
    Run FlatpakCreateDockerfilePlugin and verify the generated Dockerfile for
    each platform, including base-image override and breakage scenarios.
    """
    config = CONFIGS[config_name]

    # Pick the module override and the failure we expect, if any.
    module_override = None
    expected_exception = None
    if breakage == 'no_modules':
        module_override = []
        expected_exception = "a module is required for Flatpaks"
    elif breakage == 'multiple_modules':
        # Multiple modules only produce a warning, not a failure.
        module_override = ['eog:f28:20170629213428', 'flatpak-common:f28:123456']
    else:
        assert breakage is None

    container_data = yaml.safe_load(config['container_yaml'])
    if override_base_image is not None:
        container_data['flatpak']['base_image'] = override_base_image
    if module_override is not None:
        container_data['compose']['modules'] = module_override
    container_yaml = yaml.dump(container_data)

    platforms = ["x86_64", "s390x"]
    mock_workflow(workflow, source_dir, container_yaml, platforms)

    base_image = "registry.fedoraproject.org/fedora:latest"

    reactor_config = {
        'version': 1,
        'flatpak': {
            'base_image': base_image,
        },
        'source_registry': {
            'url': 'source_registry',
        },
    }

    runner = (
        MockEnv(workflow)
        .for_plugin(FlatpakCreateDockerfilePlugin.key)
        .set_reactor_config(reactor_config)
        .create_runner()
    )

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex.value)
        return

    runner.run()

    flatpak_util = FlatpakUtil(workflow_config=None,
                               source_config=workflow.source.config)
    assert flatpak_util.get_flatpak_source_spec() == config['source_spec']

    # container.yaml override wins over the reactor-config base image.
    expect_base_image = override_base_image if override_base_image else base_image

    # Each platform gets its own build dir with its own Dockerfile.
    for platform in platforms:
        per_platform_dir = BuildDir(workflow.build_dir.path / platform, platform)
        df = per_platform_dir.dockerfile_path.read_text("utf-8")

        assert "FROM " + expect_base_image in df
        assert 'name="{}"'.format(config['name']) in df
        assert 'com.redhat.component="{}"'.format(
            config['component']) in df
        assert "RUN rm -f /etc/yum.repos.d/*" in df
        assert "ADD atomic-reactor-repos/* /etc/yum.repos.d/" in df
def test_metadata_plugin(workflow, source_dir,
                         help_results, expected_help_results, base_from_scratch,
                         verify_media_results, expected_media_results):
    """
    Run StoreMetadataPlugin and verify the annotations it produces:
    dockerfile text, commit id, base/parent image info, image id,
    filesystem data, digests, plugin metadata, and the optional help-file
    and media-type entries.
    """
    # Two Dockerfile variants: multi-stage ending in FROM scratch, or a
    # plain single-stage build.
    if base_from_scratch:
        df_content = dedent("""\
            FROM fedora
            RUN yum install -y python-django
            CMD blabla
            FROM scratch
            RUN yum install -y python
            """)
    else:
        df_content = dedent("""\
            FROM fedora
            RUN yum install -y python-django
            CMD blabla
            """)

    prepare(workflow)
    mock_dockerfile(workflow, df_content)

    dockerfile = workflow.build_dir.any_platform.dockerfile_with_parent_env(
        workflow.imageutil.base_image_inspect()
    )

    # Resolve every non-scratch parent image to a fixed digest.
    df_images = DockerfileImages(dockerfile.parent_images)
    for parent in dockerfile.parent_images:
        if parent != 'scratch':
            df_images[parent] = "sha256:spamneggs"

    env = (MockEnv(workflow)
           .for_plugin(StoreMetadataPlugin.key)
           .set_plugin_args({"url": "http://example.com/"})
           .set_dockerfile_images(df_images)
           .set_plugin_result(RPMqaPlugin.key, "rpm1\nrpm2")
           .set_plugin_result(VerifyMediaTypesPlugin.key, verify_media_results)
           .set_plugin_result(AddHelpPlugin.key, help_results))

    if help_results is not None:
        workflow.data.annotations['help_file'] = help_results['help_file']

    workflow.fs_watcher._data = dict(fs_data=None)

    # Fake timing/error bookkeeping for the plugins-metadata annotation.
    initial_timestamp = datetime.now()
    timestamp = (initial_timestamp + timedelta(seconds=3)).isoformat()
    workflow.data.plugins_timestamps = {
        RPMqaPlugin.key: timestamp,
    }
    workflow.data.plugins_durations = {
        RPMqaPlugin.key: 3.03,
    }
    workflow.data.plugins_errors = {}

    output = env.create_runner().run()

    assert StoreMetadataPlugin.key in output
    annotations = output[StoreMetadataPlugin.key]["annotations"]
    assert "dockerfile" in annotations
    assert is_string_type(annotations['dockerfile'])
    assert "commit_id" in annotations
    assert is_string_type(annotations['commit_id'])
    assert annotations['commit_id'] == 'commit'

    # Base-image annotations: empty strings when building FROM scratch,
    # otherwise they must reflect the Dockerfile's original base image.
    assert "base-image-id" in annotations
    assert is_string_type(annotations['base-image-id'])
    assert "base-image-name" in annotations
    assert is_string_type(annotations['base-image-name'])
    assert "parent_images" in annotations
    assert is_string_type(annotations['parent_images'])
    if base_from_scratch:
        assert annotations["base-image-name"] == ""
        assert annotations["base-image-id"] == ""
        assert '"scratch": "scratch"' in annotations['parent_images']
    else:
        assert annotations["base-image-name"] ==\
               workflow.data.dockerfile_images.original_base_image
        assert annotations["base-image-id"] != ""

        assert (workflow.data.dockerfile_images.base_image.to_str() in
                annotations['parent_images'])
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])
    assert "filesystem" in annotations
    assert "fs_data" in annotations['filesystem']

    # The digests annotation must contain exactly the expected v1/v2 entries,
    # in any order (checked as set equality via the two `all` asserts).
    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    expected = [{
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }]
    assert all(digest in expected for digest in digests)
    assert all(digest in digests for digest in expected)

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["durations"]

    if expected_help_results is False:
        assert 'help_file' not in annotations
    else:
        assert json.loads(annotations['help_file']) == expected_help_results

    if expected_media_results:
        media_types = expected_media_results
        assert sorted(json.loads(annotations['media-types'])) == sorted(list(set(media_types)))
    else:
        assert 'media-types' not in annotations
def mock_env(tmpdir,
             docker_tasker,
             has_appregistry_label=False,
             appregistry_label=False,
             has_bundle_label=True,
             bundle_label=True,
             has_archive=True,
             scratch=False,
             orchestrator=False,
             selected_platform=True,
             empty_archive=False,
             remove_fails=False,
             change_csv_content=False,
             multiple_csv=False):
    """Build a plugin runner for ExportOperatorManifestsPlugin with a fully mocked tasker.

    A Dockerfile and manifests directory are generated under *tmpdir*, and the
    docker tasker's container/archive operations are stubbed with flexmock so
    the plugin can run without a real daemon.

    :param tmpdir: pytest tmpdir fixture; repo and archive fixtures are created here
    :param docker_tasker: tasker fixture whose wrapped client gets the flexmock stubs
    :param has_appregistry_label: whether the mocked Dockerfile carries the appregistry label
    :param appregistry_label: value of that label when present
    :param has_bundle_label: whether the mocked Dockerfile carries the bundle label
    :param bundle_label: value of that label when present
    :param has_archive: tri-state — True: get_archive returns a tar stream;
        False: get_archive raises NotFound (HTTP 404); None: get_archive raises
        a generic Exception (HTTP 500 response)
    :param scratch: mark the workflow as a scratch build
    :param orchestrator: make the mocked environment an orchestrator build
    :param selected_platform: if True, pass platform args matching this machine
    :param empty_archive: generate an archive with no content
    :param remove_fails: make remove_container raise, to test cleanup failure
    :param change_csv_content: alter CSV content inside the generated archive
    :param multiple_csv: put more than one CSV file into the generated archive
    :return: a configured plugin runner (result of env.create_runner)
    """
    repo_dir = tmpdir.join('test-operator').mkdir()
    mock_dockerfile(repo_dir,
                    has_appregistry_label=has_appregistry_label,
                    appregistry_label=appregistry_label,
                    has_bundle_label=has_bundle_label,
                    bundle_label=bundle_label)
    manifests_dir = mock_manifests_dir(repo_dir)

    env = (MockEnv().for_plugin(
        'postbuild', ExportOperatorManifestsPlugin.key).set_scratch(scratch))
    if orchestrator:
        env.make_orchestrator()

    class MockSource(object):
        @property
        def manifests_dir(self):
            return manifests_dir

    # Set a new source object, only the manifests_dir property is required for tests.
    setattr(env.workflow, 'source', MockSource())
    # NOTE(review): the repo directory (not a file path) is passed as the df
    # path — presumably set_df_path accepts a directory; confirm against builder API.
    env.workflow.builder.set_df_path(str(repo_dir))

    mock_stream = generate_archive(tmpdir, empty_archive, change_csv_content,
                                   multiple_csv)
    if selected_platform:
        # machine() is the current host architecture, so the plugin sees
        # itself as the platform selected for manifest extraction.
        env.set_plugin_args({
            'operator_manifests_extract_platform': machine(),
            'platform': machine()
        })

    (flexmock(docker_tasker.tasker.d.wrapped).should_receive(
        'create_container').with_args(env.workflow.image,
                                      command=["/bin/bash"]).and_return(
                                          {'Id': CONTAINER_ID}))

    if remove_fails:
        # Simulate cleanup failing after extraction.
        (flexmock(docker_tasker.tasker.d.wrapped).should_receive(
            'remove_container').with_args(CONTAINER_ID).and_raise(
                Exception('error')))
    else:
        (flexmock(docker_tasker.tasker.d.wrapped).should_receive(
            'remove_container').with_args(CONTAINER_ID))

    if has_archive:
        # Happy path: the container exposes /manifests as a tar stream.
        (flexmock(docker_tasker.tasker.d.wrapped).should_receive(
            'get_archive').with_args(CONTAINER_ID,
                                     '/manifests').and_return(mock_stream, {}))
    elif has_archive is not None:
        # has_archive is False: the path does not exist in the container (404).
        response = Response()
        response.status_code = 404
        (flexmock(docker_tasker.tasker.d.wrapped).should_receive(
            'get_archive').with_args(CONTAINER_ID, '/manifests').and_raise(
                NotFound('Not found', response=response)))
    else:
        # has_archive is None: an unexpected server-side failure (500).
        response = Response()
        response.status_code = 500
        (flexmock(docker_tasker.tasker.d.wrapped).should_receive(
            'get_archive').with_args(CONTAINER_ID, '/manifests').and_raise(
                Exception('error')))

    return env.create_runner(docker_tasker)
Esempio n. 26
0
    def test_hide_files_multi_stage(self, workflow):
        """File hiding/restoring must be injected into every stage of a multi-stage build."""
        original_df = dedent("""\
            FROM sha256:123456 as builder
            RUN blah
            USER custom_user
            RUN bluh

            FROM sha256:654321 as unused
            RUN bleh

            FROM sha256:123456
            RUN yum install -y python-flask
            USER custom_user2
            CMD /bin/bash
            """)
        files_to_hide = [
            '/etc/yum.repos.d/repo_ignore_1.repo',
            '/etc/yum.repos.d/repo_ignore_2.repo',
        ]
        base_images = ['sha256:123456', 'sha256:654321']

        self.prepare(
            workflow,
            original_df,
            hide_files={'tmpdir': '/tmp', 'files': files_to_hide},
            parent_images=base_images,
            inherited_user="******",
        )

        runner = MockEnv(workflow).for_plugin(HideFilesPlugin.key).create_runner()
        runner.run()

        # Each stage gains a hide prologue (mv to /tmp) and a restore epilogue
        # (mv back with SELinux relabel), bracketed by USER switches.
        expected_df = dedent("""\
            FROM sha256:123456 as builder
            USER root
            RUN mv -f /etc/yum.repos.d/repo_ignore_1.repo /tmp || :
            RUN mv -f /etc/yum.repos.d/repo_ignore_2.repo /tmp || :
            USER inherited_user
            RUN blah
            USER custom_user
            RUN bluh
            USER root
            RUN mv -fZ /tmp/repo_ignore_1.repo /etc/yum.repos.d/repo_ignore_1.repo || :
            RUN mv -fZ /tmp/repo_ignore_2.repo /etc/yum.repos.d/repo_ignore_2.repo || :
            USER custom_user

            FROM sha256:654321 as unused
            USER root
            RUN mv -f /etc/yum.repos.d/repo_ignore_1.repo /tmp || :
            RUN mv -f /etc/yum.repos.d/repo_ignore_2.repo /tmp || :
            USER inherited_user
            RUN bleh
            USER root
            RUN mv -fZ /tmp/repo_ignore_1.repo /etc/yum.repos.d/repo_ignore_1.repo || :
            RUN mv -fZ /tmp/repo_ignore_2.repo /etc/yum.repos.d/repo_ignore_2.repo || :
            USER inherited_user

            FROM sha256:123456
            USER root
            RUN mv -f /etc/yum.repos.d/repo_ignore_1.repo /tmp || :
            RUN mv -f /etc/yum.repos.d/repo_ignore_2.repo /tmp || :
            USER inherited_user
            RUN yum install -y python-flask
            USER custom_user2
            CMD /bin/bash
            USER root
            RUN mv -fZ /tmp/repo_ignore_1.repo /etc/yum.repos.d/repo_ignore_1.repo || :
            RUN mv -fZ /tmp/repo_ignore_2.repo /etc/yum.repos.d/repo_ignore_2.repo || :
            USER custom_user2
            """)
        check_df_content(expected_df, workflow)
Esempio n. 27
0
def test_rpmqa_plugin_exception(workflow):
    """A failing rpmqa run must surface as a PluginFailedException from the runner."""
    arches = ('x86_64', 's390x', 'ppc64le', 'aarch64')
    workflow.build_dir.init_build_dirs(list(arches), workflow.source)
    runner = MockEnv(workflow).for_plugin(RPMqaPlugin.key).create_runner()
    with pytest.raises(PluginFailedException):
        runner.run()
Esempio n. 28
0
def test_tag_and_push_plugin_oci(workflow, monkeypatch, is_source_build, v2s2,
                                 unsupported_image_type, use_secret, fail_push,
                                 caplog):
    """Exercise TagAndPushPlugin for OCI/flatpak and source-container pushes.

    Mocks koji, the skopeo CLI invocation, and the registry manifest endpoints,
    then verifies either a successful push or the expected failure mode
    (failed push, unsupported image type, or missing v2 schema 2 digest).

    :param is_source_build: push a source-container docker-archive instead of
        flatpak OCI images
    :param v2s2: whether the registry offers a v2 schema 2 manifest for the
        source build; without it the push must fail
    :param unsupported_image_type: mark the flatpak image metadata with an
        unknown type, which skopeo push must reject
    :param use_secret: supply registry auth via a .dockercfg secret directory
    :param fail_push: make the mocked skopeo invocation fail
    :param caplog: pytest fixture used to check logged error messages
    """
    sources_koji_id = '123456'
    sources_koji_target = 'source_target'
    sources_koji_repo = 'namespace/container_build_image'
    sources_koji_pull_spec = 'registry_url/{}@sha256:987654321'.format(
        sources_koji_repo)
    sources_random_number = 1234567890
    sources_timestamp = datetime(year=2019, month=12, day=12)
    current_platform = platform.processor() or 'x86_64'
    # Source-build tag format: <target>-<random>-<timestamp>-<platform>
    sources_tagname = '{}-{}-{}-{}'.format(
        sources_koji_target, sources_random_number,
        sources_timestamp.strftime('%Y%m%d%H%M%S'), current_platform)

    secret_path = None
    if use_secret:
        # Write a throwaway .dockercfg so the plugin passes --authfile to skopeo.
        temp_dir = mkdtemp()
        with open(os.path.join(temp_dir, ".dockercfg"), "w+") as dockerconfig:
            dockerconfig_contents = {
                LOCALHOST_REGISTRY: {
                    "username": "******",
                    "email": "*****@*****.**",
                    "password": "******"
                }
            }
            dockerconfig.write(json.dumps(dockerconfig_contents))
            dockerconfig.flush()
            secret_path = temp_dir

    reactor_config = {
        'registries': [
            {
                'url': LOCALHOST_REGISTRY,
                'insecure': True,
                'auth': {
                    'cfg_path': secret_path
                },
            },
        ],
    }
    env = (MockEnv(workflow).for_plugin(TagAndPushPlugin.key).set_plugin_args({
        'koji_target':
        sources_koji_target
    }).set_reactor_config(reactor_config))

    add_koji_map_in_workflow(workflow, hub_url='', root_url='')

    wf_data = workflow.data
    if is_source_build:
        # Source builds: single platform, fed by the fetch-sources and
        # source-container plugin results.
        platforms = ['x86_64']
        workflow.build_dir.init_build_dirs(platforms, workflow.source)

        env.set_plugin_result(
            FetchSourcesPlugin.key,
            {'sources_for_koji_build_id': sources_koji_id},
        )
        env.set_plugin_result(
            SourceContainerPlugin.key,
            {'image_metadata': deepcopy(IMAGE_METADATA_DOCKER_ARCHIVE)},
        )
    else:
        # Flatpak builds: one OCI image per platform, tagged as a unique image.
        platforms = ['x86_64', 'ppc64le', 's390x', 'aarch64']
        wf_data.tag_conf.add_unique_image(f'{LOCALHOST_REGISTRY}/{TEST_IMAGE}')
        workflow.user_params['flatpak'] = True
        workflow.build_dir.init_build_dirs(platforms, workflow.source)
        env.set_plugin_result(CheckAndSetPlatformsPlugin.key, platforms)

    class MockedClientSession(object):
        """Minimal koji client: only getBuild/krb_login are used by the plugin."""
        def __init__(self, hub, opts=None):
            pass

        def getBuild(self, build_info):
            if is_source_build:
                assert build_info == sources_koji_id
                return {
                    'extra': {
                        'image': {
                            'index': {
                                'pull': [sources_koji_pull_spec]
                            }
                        }
                    }
                }

            else:
                return None

        def krb_login(self, *args, **kwargs):
            return True

    session = MockedClientSession('')
    flexmock(koji, ClientSession=session)
    # Pin the random component and timestamp so sources_tagname is predictable.
    flexmock(random).should_receive('randrange').and_return(
        sources_random_number)
    flexmock(osbs.utils).should_receive('utcnow').and_return(sources_timestamp)

    if is_source_build:
        media_type = 'application/vnd.docker.distribution.manifest.v2+json'
    else:
        media_type = 'application/vnd.oci.image.manifest.v1+json'
    ref_name = "app/org.gnome.eog/x86_64/master"

    if not is_source_build:
        # Add a mock OCI image to 'flatpak_create_oci' results; this forces the tag_and_push
        # plugin to push with skopeo
        flatpak_create_oci_result: Dict[str, Any] = {}
        # No need to create image archives, just need to mock its metadata
        for current_platform in platforms:
            if unsupported_image_type:
                image_type = 'unsupported_type'
            else:
                image_type = IMAGE_TYPE_OCI
            metadata = deepcopy(IMAGE_METADATA_OCI)
            metadata['ref_name'] = ref_name.replace('x86_64', current_platform)
            metadata['type'] = image_type
            flatpak_create_oci_result[current_platform] = metadata
        env.set_plugin_result(FlatpakCreateOciPlugin.key,
                              flatpak_create_oci_result)

    # Mock the call to skopeo

    def check_run_skopeo(args):
        # Stand-in for retries.run_cmd: validates the skopeo command line
        # built by the plugin instead of executing it.
        if fail_push:
            raise subprocess.CalledProcessError(returncode=1,
                                                cmd=args,
                                                output="Failed")
        assert args[0] == 'skopeo'
        if use_secret:
            assert '--authfile=' + os.path.join(secret_path,
                                                '.dockercfg') in args
        assert '--dest-tls-verify=false' in args
        if is_source_build:
            assert args[
                -2] == 'docker-archive://' + IMAGE_METADATA_DOCKER_ARCHIVE[
                    'path']
            output_image = 'docker://{}/{}:{}'.format(LOCALHOST_REGISTRY,
                                                      sources_koji_repo,
                                                      sources_tagname)
            assert args[-1] == output_image
        else:
            # The pushed tag ends in the platform name; recover it to check
            # the per-platform OCI source reference.
            current_platform = args[-1].split('-')[-1]
            assert args[-2] == ('oci:' + IMAGE_METADATA_OCI['path'] + ':' +
                                ref_name.replace('x86_64', current_platform))
            assert args[-1].startswith('docker://' + LOCALHOST_REGISTRY +
                                       f'/{TEST_IMAGE_NAME}')
            assert '--format=v2s2' in args
        return ''

    (flexmock(retries).should_receive("run_cmd").replace_with(check_run_skopeo)
     )

    # Mock out the response from the registry once the OCI image is uploaded

    manifest_latest_url = "https://{}/v2/{}/manifests/latest".format(
        LOCALHOST_REGISTRY, TEST_IMAGE)
    manifest_url = "https://{}/v2/{}/manifests/{}".format(
        LOCALHOST_REGISTRY, TEST_IMAGE, DIGEST_OCI)
    manifest_source_tag_url = "https://{}/v2/{}/manifests/{}".format(
        LOCALHOST_REGISTRY, sources_koji_repo, sources_tagname)
    manifest_source_digest_url = "https://{}/v2/{}/manifests/{}".format(
        LOCALHOST_REGISTRY, sources_koji_repo, DIGEST_OCI)

    # Successful manifest GET: correct media type plus the expected digest header.
    manifest_response = requests.Response()
    (flexmock(manifest_response,
              raise_for_status=lambda: None,
              json={},
              headers={
                  'Content-Type': media_type,
                  'Docker-Content-Digest': DIGEST_OCI
              }))

    # 404 response used when the requested manifest media type is unavailable.
    manifest_unacceptable_response = requests.Response()
    (flexmock(manifest_unacceptable_response,
              status_code=404,
              json={"errors": [{
                  "code": "MANIFEST_UNKNOWN"
              }]}))

    def custom_get(method, url, headers, **kwargs):
        # Registry stub: tag lookups honor the Accept header (and the v2s2
        # flag for source builds); digest lookups always succeed.
        if url.startswith(
                manifest_latest_url) or url == manifest_source_tag_url:
            if headers['Accept'] == media_type:
                if is_source_build and not v2s2:
                    return manifest_unacceptable_response
                else:
                    return manifest_response
            else:
                return manifest_unacceptable_response

        if url == manifest_url or url == manifest_source_digest_url:
            return manifest_response

    mock_get_retry_session()

    (flexmock(
        requests.Session).should_receive('request').replace_with(custom_get))

    if fail_push or unsupported_image_type or (is_source_build and not v2s2):
        # All failure modes must bubble up as PluginFailedException.
        with pytest.raises(PluginFailedException):
            env.create_runner().run()

        if not fail_push and is_source_build and not v2s2:
            assert "Unable to fetch v2 schema 2 digest for" in caplog.text

        if unsupported_image_type and not fail_push:
            assert (
                'Attempt to push unsupported image type unsupported_type with skopeo'
                in caplog.text)
    else:
        env.create_runner().run()

        assert workflow.conf.registry
        repos_annotations = get_repositories_annotations(wf_data.tag_conf)
        assert wf_data.annotations['repositories'] == repos_annotations
Esempio n. 29
0
def mock_env(workflow):
    """Return a MockEnv preconfigured for the check_base_image plugin."""
    env = MockEnv(workflow)
    return env.for_plugin(CheckBaseImagePlugin.key)