Example #1
0
def test_bad_plugins_conf(plugins_conf: List[Dict[str, Any]], workflow,
                          caplog):
    """Build with a bad plugin configuration and verify failure reporting.

    The build must raise PluginFailedException, the watcher plugin must
    never run, every recorded plugin error (key and reason) must be a
    string, and at least one ERROR-level log record must be emitted.
    """
    flexmock(DockerfileParser, content='df_content')
    this_file = inspect.getfile(UpdateMaintainerPlugin)

    caplog.clear()

    workflow.plugins_conf = plugins_conf
    workflow.plugin_files = [this_file]

    # Find the 'watcher' parameter
    watchers = [conf.get('args', {}).get('watcher') for conf in plugins_conf]
    watcher = [x for x in watchers if x][0]

    with pytest.raises(PluginFailedException):
        workflow.build_docker_image()

    assert not watcher.was_called()
    assert workflow.data.plugins_errors
    # all() accepts any iterable; no need to materialize a list first
    assert all(is_string_type(plugin)
               for plugin in workflow.data.plugins_errors)
    assert all(is_string_type(reason)
               for reason in workflow.data.plugins_errors.values())

    assert any(record.levelno == logging.ERROR for record in caplog.records)
def test_metadata_plugin(tmpdir):
    """Run StoreMetadataInOSv3Plugin and verify its output labels.

    Every expected label must be present and string-typed, and the
    reported digests must match the two expected registry entries in
    either order.
    """
    workflow = prepare()

    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]
    assert "dockerfile" in labels
    assert is_string_type(labels['dockerfile'])
    assert "artefacts" in labels
    assert is_string_type(labels['artefacts'])
    assert "logs" in labels
    assert is_string_type(labels['logs'])
    assert "rpm-packages" in labels
    assert is_string_type(labels['rpm-packages'])
    assert "repositories" in labels
    assert is_string_type(labels['repositories'])
    assert "commit_id" in labels
    assert is_string_type(labels['commit_id'])
    assert "base-image-id" in labels
    assert is_string_type(labels['base-image-id'])
    assert "base-image-name" in labels
    assert is_string_type(labels['base-image-name'])
    assert "image-id" in labels
    assert is_string_type(labels['image-id'])

    assert "digests" in labels
    assert is_string_type(labels['digests'])
    digests = json.loads(labels['digests'])
    expected = [{
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
    }]
    # BUG FIX: reversed() returns an iterator, which never compares equal
    # to a list, so the second branch could never match; materialize it.
    assert digests == expected or digests == list(reversed(expected))
def test_metadata_plugin(tmpdir):
    """Run StoreMetadataInOSv3Plugin and verify its output labels.

    Every expected label must be present and string-typed, and the
    reported digests must match the two expected registry entries in
    either order.
    """
    workflow = prepare()

    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }

    runner = ExitPluginsRunner(None, workflow,
                               [{
                                   'name': StoreMetadataInOSv3Plugin.key,
                                   "args": {
                                       "url": "http://example.com/"
                                   }
                               }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]
    assert "dockerfile" in labels
    assert is_string_type(labels['dockerfile'])
    assert "artefacts" in labels
    assert is_string_type(labels['artefacts'])
    assert "logs" in labels
    assert is_string_type(labels['logs'])
    assert "rpm-packages" in labels
    assert is_string_type(labels['rpm-packages'])
    assert "repositories" in labels
    assert is_string_type(labels['repositories'])
    assert "commit_id" in labels
    assert is_string_type(labels['commit_id'])
    assert "base-image-id" in labels
    assert is_string_type(labels['base-image-id'])
    assert "base-image-name" in labels
    assert is_string_type(labels['base-image-name'])
    assert "image-id" in labels
    assert is_string_type(labels['image-id'])

    assert "digests" in labels
    assert is_string_type(labels['digests'])
    digests = json.loads(labels['digests'])
    expected = [{
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
    }]
    # BUG FIX: reversed() returns an iterator, which never compares equal
    # to a list, so the second branch could never match; materialize it.
    assert digests == expected or digests == list(reversed(expected))
Example #4
0
def test_plugin_errors(request, plugins, should_fail, should_log):
    """
    Try bad plugin configuration.

    Depending on parametrization the build either fails with
    PluginFailedException (watcher never runs, plugin errors recorded
    as strings) or succeeds (watcher runs, no errors), and errors are
    logged only when expected.
    """
    flexmock(DockerfileParser, content='df_content')
    flexmock(DockerApiPlugin).should_receive('run').and_return(
        DUMMY_BUILD_RESULT)
    this_file = inspect.getfile(PreRaises)
    mock_docker()
    fake_builder = MockInsideBuilder()
    flexmock(InsideBuilder).new_instances(fake_builder)
    fake_logger = FakeLogger()

    existing_logger = atomic_reactor.plugin.logger

    def restore_logger():
        atomic_reactor.plugin.logger = existing_logger

    request.addfinalizer(restore_logger)
    atomic_reactor.plugin.logger = fake_logger

    workflow = DockerBuildWorkflow(MOCK_SOURCE,
                                   'test-image',
                                   plugin_files=[this_file],
                                   **plugins)

    # Find the 'watcher' parameter
    watchers = [
        conf.get('args', {}).get('watcher') for plugin in plugins.values()
        for conf in plugin
    ]
    watcher = [x for x in watchers if x][0]

    if should_fail:
        with pytest.raises(PluginFailedException):
            workflow.build_docker_image()

        assert not watcher.was_called()
        assert workflow.plugins_errors
        # all() accepts any iterable; no need to materialize a list first
        assert all(is_string_type(plugin)
                   for plugin in workflow.plugins_errors)
        assert all(is_string_type(reason)
                   for reason in workflow.plugins_errors.values())
    else:
        workflow.build_docker_image()
        assert watcher.was_called()
        assert not workflow.plugins_errors

    if should_log:
        assert len(fake_logger.errors) > 0
    else:
        assert len(fake_logger.errors) == 0
def test_plugin_errors(request, plugins, should_fail, should_log):
    """
    Try bad plugin configuration.

    Depending on parametrization the build either fails with
    PluginFailedException (watcher never runs, plugin errors recorded
    as strings) or succeeds (watcher runs, no errors), and errors are
    logged only when expected.
    """
    flexmock(DockerfileParser, content='df_content')
    flexmock(DockerApiPlugin).should_receive('run').and_return(DUMMY_BUILD_RESULT)
    this_file = inspect.getfile(PreRaises)
    mock_docker()
    fake_builder = MockInsideBuilder()
    flexmock(InsideBuilder).new_instances(fake_builder)
    fake_logger = FakeLogger()

    existing_logger = atomic_reactor.plugin.logger

    def restore_logger():
        atomic_reactor.plugin.logger = existing_logger

    request.addfinalizer(restore_logger)
    atomic_reactor.plugin.logger = fake_logger

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image',
                                   plugin_files=[this_file],
                                   **plugins)

    # Find the 'watcher' parameter
    watchers = [conf.get('args', {}).get('watcher')
                for plugin in plugins.values()
                for conf in plugin]
    watcher = [x for x in watchers if x][0]

    if should_fail:
        with pytest.raises(PluginFailedException):
            workflow.build_docker_image()

        assert not watcher.was_called()
        assert workflow.plugins_errors
        # all() accepts any iterable; no need to materialize a list first
        assert all(is_string_type(plugin)
                   for plugin in workflow.plugins_errors)
        assert all(is_string_type(reason)
                   for reason in workflow.plugins_errors.values())
    else:
        workflow.build_docker_image()
        assert watcher.was_called()
        assert not workflow.plugins_errors

    if should_log:
        assert len(fake_logger.errors) > 0
    else:
        assert len(fake_logger.errors) == 0
def test_labels_metadata_plugin(tmpdir):
    """Verify that the koji build id stored by the promote plugin shows
    up in the output labels as a string carrying the same value."""
    koji_build_id = 1234
    workflow = prepare()

    workflow.exit_results = {KojiPromotePlugin.key: koji_build_id}

    plugin_conf = [{
        'name': StoreMetadataInOSv3Plugin.key,
        'args': {'url': 'http://example.com/'},
    }]
    output = ExitPluginsRunner(None, workflow, plugin_conf).run()

    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" in labels
    build_id = labels["koji-build-id"]
    assert is_string_type(build_id)
    assert int(build_id) == koji_build_id
def test_labels_metadata_plugin(tmpdir, koji_plugin):
    """Verify that the koji build id recorded by the parametrized koji
    exit plugin is exposed as a string label with the same value."""
    koji_build_id = 1234
    workflow = prepare()
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    workflow.exit_results = {koji_plugin: koji_build_id}

    plugin_conf = [{
        'name': StoreMetadataInOSv3Plugin.key,
        'args': {'url': 'http://example.com/'},
    }]
    output = ExitPluginsRunner(None, workflow, plugin_conf).run()

    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" in labels
    build_id = labels["koji-build-id"]
    assert is_string_type(build_id)
    assert int(build_id) == koji_build_id
def test_labels_metadata_plugin(tmpdir, koji_plugin, reactor_config_map):
    """Verify the koji build id ends up in the labels as a string with
    the same integer value, with or without a reactor config map."""
    koji_build_id = 1234
    workflow = prepare(reactor_config_map=reactor_config_map)
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    workflow.exit_results = {koji_plugin: koji_build_id}

    store_metadata_conf = [{
        'name': StoreMetadataInOSv3Plugin.key,
        'args': {'url': 'http://example.com/'},
    }]
    runner = ExitPluginsRunner(None, workflow, store_metadata_conf)

    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" in labels
    build_id = labels["koji-build-id"]
    assert is_string_type(build_id)
    assert int(build_id) == koji_build_id
def test_labels_metadata_plugin(tmpdir):
    """Check that the build id recorded under the koji promote plugin key
    is surfaced through the labels as a string with the same value."""
    koji_build_id = 1234
    workflow = prepare()

    workflow.exit_results = {KojiPromotePlugin.key: koji_build_id}

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{'name': StoreMetadataInOSv3Plugin.key,
          'args': {'url': 'http://example.com/'}}],
    )
    output = runner.run()

    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" in labels
    assert is_string_type(labels["koji-build-id"])
    assert int(labels["koji-build-id"]) == koji_build_id
def test_metadata_plugin(workflow, source_dir,
                         help_results, expected_help_results, base_from_scratch,
                         verify_media_results, expected_media_results):
    """Run StoreMetadataPlugin via MockEnv and verify its annotations.

    Parametrized over a single-stage vs. multi-stage FROM-scratch
    Dockerfile, optional add-help plugin results, and optional verified
    media types.
    """
    if base_from_scratch:
        df_content = dedent("""\
            FROM fedora
            RUN yum install -y python-django
            CMD blabla
            FROM scratch
            RUN yum install -y python
            """)
    else:
        df_content = dedent("""\
            FROM fedora
            RUN yum install -y python-django
            CMD blabla
            """)

    prepare(workflow)
    mock_dockerfile(workflow, df_content)

    dockerfile = workflow.build_dir.any_platform.dockerfile_with_parent_env(
        workflow.imageutil.base_image_inspect()
    )

    # Pin every non-scratch parent image to a fixed digest so the
    # parent_images annotation has resolved values to check.
    df_images = DockerfileImages(dockerfile.parent_images)
    for parent in dockerfile.parent_images:
        if parent != 'scratch':
            df_images[parent] = "sha256:spamneggs"

    env = (MockEnv(workflow)
           .for_plugin(StoreMetadataPlugin.key)
           .set_plugin_args({"url": "http://example.com/"})
           .set_dockerfile_images(df_images)
           .set_plugin_result(RPMqaPlugin.key, "rpm1\nrpm2")
           .set_plugin_result(VerifyMediaTypesPlugin.key, verify_media_results)
           .set_plugin_result(AddHelpPlugin.key, help_results))

    # Mirror the help-file result into workflow annotations, as the
    # 'help_file' assertion below reads it from there.
    if help_results is not None:
        workflow.data.annotations['help_file'] = help_results['help_file']

    # Seed filesystem-watcher data so the "filesystem" annotation
    # assertions below have a key to find.
    workflow.fs_watcher._data = dict(fs_data=None)

    # Simulate the rpmqa plugin having run for 3.03s, finishing 3s
    # after the initial timestamp, with no plugin errors.
    initial_timestamp = datetime.now()
    timestamp = (initial_timestamp + timedelta(seconds=3)).isoformat()
    workflow.data.plugins_timestamps = {
        RPMqaPlugin.key: timestamp,
    }
    workflow.data.plugins_durations = {
        RPMqaPlugin.key: 3.03,
    }
    workflow.data.plugins_errors = {}

    output = env.create_runner().run()

    assert StoreMetadataPlugin.key in output
    annotations = output[StoreMetadataPlugin.key]["annotations"]
    assert "dockerfile" in annotations
    assert is_string_type(annotations['dockerfile'])
    assert "commit_id" in annotations
    assert is_string_type(annotations['commit_id'])
    assert annotations['commit_id'] == 'commit'

    assert "base-image-id" in annotations
    assert is_string_type(annotations['base-image-id'])
    assert "base-image-name" in annotations
    assert is_string_type(annotations['base-image-name'])
    assert "parent_images" in annotations
    assert is_string_type(annotations['parent_images'])
    if base_from_scratch:
        # A FROM-scratch build has no base image to report.
        assert annotations["base-image-name"] == ""
        assert annotations["base-image-id"] == ""
        assert '"scratch": "scratch"' in annotations['parent_images']
    else:
        assert annotations["base-image-name"] ==\
               workflow.data.dockerfile_images.original_base_image
        assert annotations["base-image-id"] != ""

        assert (workflow.data.dockerfile_images.base_image.to_str() in
                annotations['parent_images'])
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])
    assert "filesystem" in annotations
    assert "fs_data" in annotations['filesystem']

    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    # Each pushed repo:tag is expected once per schema version (v1/v2).
    expected = [{
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }]
    # Same entries in both directions: order-insensitive comparison.
    assert all(digest in expected for digest in digests)
    assert all(digest in digests for digest in expected)

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["durations"]

    if expected_help_results is False:
        assert 'help_file' not in annotations
    else:
        assert json.loads(annotations['help_file']) == expected_help_results

    if expected_media_results:
        media_types = expected_media_results
        assert sorted(json.loads(annotations['media-types'])) == sorted(list(set(media_types)))
    else:
        assert 'media-types' not in annotations
def test_metadata_plugin(tmpdir):
    """Run StoreMetadataInOSv3Plugin with simulated plugin results and
    timing data, and verify the produced annotations.

    Checks string-typed annotation values, the pushed digests, and the
    plugins-metadata payload (errors, durations, timestamps).
    """
    initial_timestamp = datetime.now()
    workflow = prepare()

    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }
    # Simulated per-plugin finish times at distinct offsets from the
    # initial timestamp, plus matching durations and one plugin error.
    workflow.plugins_timestamps = {
        CpDockerfilePlugin.key: initial_timestamp.isoformat(),
        DistgitFetchArtefactsPlugin.key: (initial_timestamp + timedelta(seconds=1)).isoformat(),
        PostBuildRPMqaPlugin.key: (initial_timestamp + timedelta(seconds=3)).isoformat(),
    }
    workflow.plugins_durations = {
        CpDockerfilePlugin.key: 1.01,
        DistgitFetchArtefactsPlugin.key: 2.02,
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.plugins_errors = {
        DistgitFetchArtefactsPlugin.key: 'foo'
    }

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert "dockerfile" in annotations
    assert is_string_type(annotations['dockerfile'])
    assert "artefacts" in annotations
    assert is_string_type(annotations['artefacts'])
    assert "logs" in annotations
    assert is_string_type(annotations['logs'])
    assert annotations['logs'] == ''
    assert "rpm-packages" in annotations
    assert is_string_type(annotations['rpm-packages'])
    assert annotations['rpm-packages'] == ''
    assert "repositories" in annotations
    assert is_string_type(annotations['repositories'])
    assert "commit_id" in annotations
    assert is_string_type(annotations['commit_id'])
    assert "base-image-id" in annotations
    assert is_string_type(annotations['base-image-id'])
    assert "base-image-name" in annotations
    assert is_string_type(annotations['base-image-name'])
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])

    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    expected = [{
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
    }]
    # BUG FIX: reversed() returns an iterator, which never compares equal
    # to a list, so the second branch could never match; materialize it.
    assert digests == expected or digests == list(reversed(expected))

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "distgit_fetch_artefacts" in plugins_metadata["errors"]

    assert "cp_dockerfile" in plugins_metadata["durations"]
    assert "distgit_fetch_artefacts" in plugins_metadata["durations"]
    assert "all_rpm_packages" in plugins_metadata["durations"]
def test_metadata_plugin(tmpdir, br_annotations, expected_br_annotations,
                         br_labels, expected_br_labels, koji, help_results,
                         expected_help_results, pulp_push_results,
                         expected_pulp_push_results, pulp_pull_results,
                         expected_pulp_pull_results):
    """Run StoreMetadataInOSv3Plugin and verify labels and annotations.

    Parametrized over build-result annotations/labels, the koji upload
    path (configmap metadata fragment), add-help results, and pulp
    push/pull results (which drive v1-image-id and media-types).
    """
    initial_timestamp = datetime.now()
    workflow = prepare()
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    # Simulated results of earlier plugins that feed the metadata plugin.
    workflow.prebuild_results = {AddHelpPlugin.key: help_results}
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
        PLUGIN_PULP_PUSH_KEY: pulp_push_results,
    }
    workflow.exit_results = {
        PulpPullPlugin.key: pulp_pull_results,
    }

    # Optionally attach build-result annotations/labels to be propagated.
    if br_annotations or br_labels:
        workflow.build_result = BuildResult(
            image_id=INPUT_IMAGE,
            annotations={'br_annotations': br_annotations}
            if br_annotations else None,
            labels={'br_labels': br_labels} if br_labels else None,
        )

    # Simulate the rpmqa plugin finishing 3s after the start, no errors.
    timestamp = (initial_timestamp + timedelta(seconds=3)).isoformat()
    workflow.plugins_timestamps = {
        PostBuildRPMqaPlugin.key: timestamp,
    }
    workflow.plugins_durations = {
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.plugins_errors = {}

    if koji:
        # The koji upload path also records a configmap metadata fragment.
        cm_annotations = {
            'metadata_fragment_key': 'metadata.json',
            'metadata_fragment': 'configmap/build-1-md'
        }
        workflow.postbuild_results[
            PLUGIN_KOJI_UPLOAD_PLUGIN_KEY] = cm_annotations
        workflow.plugins_timestamps[PLUGIN_KOJI_UPLOAD_PLUGIN_KEY] = timestamp
        workflow.plugins_durations[PLUGIN_KOJI_UPLOAD_PLUGIN_KEY] = 3.03

    runner = ExitPluginsRunner(None, workflow,
                               [{
                                   'name': StoreMetadataInOSv3Plugin.key,
                                   "args": {
                                       "url": "http://example.com/"
                                   }
                               }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert "dockerfile" in annotations
    assert is_string_type(annotations['dockerfile'])
    assert "logs" in annotations
    assert is_string_type(annotations['logs'])
    assert annotations['logs'] == ''
    assert "rpm-packages" in annotations
    assert is_string_type(annotations['rpm-packages'])
    assert annotations['rpm-packages'] == ''
    assert "repositories" in annotations
    assert is_string_type(annotations['repositories'])
    assert "commit_id" in annotations
    assert is_string_type(annotations['commit_id'])
    assert "base-image-id" in annotations
    assert is_string_type(annotations['base-image-id'])
    assert "base-image-name" in annotations
    assert is_string_type(annotations['base-image-name'])
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])

    # The metadata fragment annotations appear only on the koji path.
    if koji:
        assert "metadata_fragment" in annotations
        assert is_string_type(annotations['metadata_fragment'])
        assert "metadata_fragment_key" in annotations
        assert is_string_type(annotations['metadata_fragment_key'])
    else:
        assert "metadata_fragment" not in annotations
        assert "metadata_fragment_key" not in annotations

    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    # Each repo:tag is expected for both registries and both schema
    # versions (v1/v2).
    expected = [{
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }]
    # Same entries in both directions: order-insensitive comparison.
    assert all(digest in expected for digest in digests)
    assert all(digest in digests for digest in expected)

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["durations"]

    if br_annotations:
        assert annotations['br_annotations'] == expected_br_annotations
    else:
        assert 'br_annotations' not in annotations

    if br_labels:
        assert labels['br_labels'] == expected_br_labels
    else:
        assert 'br_labels' not in labels

    if expected_help_results is False:
        assert 'help_file' not in annotations
    else:
        assert json.loads(annotations['help_file']) == expected_help_results

    if expected_pulp_push_results is False:
        assert 'v1-image-id' not in annotations
    else:
        assert annotations['v1-image-id'] == expected_pulp_push_results

    # media-types is present only when pulp push and/or pull contributed.
    if not expected_pulp_pull_results and not expected_pulp_push_results:
        assert 'media-types' not in annotations
    else:
        media_types = []
        if expected_pulp_push_results:
            media_types = ['application/json']
        if expected_pulp_pull_results:
            media_types += pulp_pull_results
        assert sorted(json.loads(annotations['media-types'])) == sorted(
            list(set(media_types)))
Example #13
0
def test_metadata_plugin(tmpdir, br_annotations, expected_br_annotations,
                         br_labels, expected_br_labels, koji, help_results,
                         expected_help_results, base_from_scratch,
                         verify_media_results, expected_media_results):
    """Run StoreMetadataInOSv3Plugin and verify labels and annotations.

    Parametrized over a single-stage vs. multi-stage FROM-scratch
    Dockerfile, build-result annotations/labels, the koji upload path
    (configmap metadata fragment), add-help results, and verified media
    types.
    """
    initial_timestamp = datetime.now()
    workflow = prepare()
    if base_from_scratch:
        df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla
FROM scratch
RUN yum install -y python"""
        workflow.builder.base_from_scratch = base_from_scratch
    else:
        df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""

    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder.dockerfile_images = DockerfileImages(df.parent_images)
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    # Mirror the help-file result into workflow annotations, as the
    # 'help_file' assertion below reads it from there.
    workflow.prebuild_results = {AddHelpPlugin.key: help_results}
    if help_results is not None:
        workflow.annotations['help_file'] = help_results['help_file']

    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }
    workflow.exit_results = {
        PLUGIN_VERIFY_MEDIA_KEY: verify_media_results,
    }
    # Seed filesystem-watcher data so the "filesystem" annotation
    # assertions below have a key to find.
    workflow.fs_watcher._data = dict(fs_data=None)

    # Optionally attach build-result annotations/labels to be propagated.
    if br_annotations or br_labels:
        workflow.build_result = BuildResult(
            image_id=INPUT_IMAGE,
            annotations={'br_annotations': br_annotations}
            if br_annotations else None,
            labels={'br_labels': br_labels} if br_labels else None,
        )

    # Simulate the rpmqa plugin finishing 3s after the start, no errors.
    timestamp = (initial_timestamp + timedelta(seconds=3)).isoformat()
    workflow.plugins_timestamps = {
        PostBuildRPMqaPlugin.key: timestamp,
    }
    workflow.plugins_durations = {
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.plugins_errors = {}

    if koji:
        # The koji upload path also records a configmap metadata fragment.
        cm_annotations = {
            'metadata_fragment_key': 'metadata.json',
            'metadata_fragment': 'configmap/build-1-md'
        }
        workflow.postbuild_results[
            PLUGIN_KOJI_UPLOAD_PLUGIN_KEY] = cm_annotations
        workflow.plugins_timestamps[PLUGIN_KOJI_UPLOAD_PLUGIN_KEY] = timestamp
        workflow.plugins_durations[PLUGIN_KOJI_UPLOAD_PLUGIN_KEY] = 3.03

    runner = ExitPluginsRunner(None, workflow,
                               [{
                                   'name': StoreMetadataInOSv3Plugin.key,
                                   "args": {
                                       "url": "http://example.com/"
                                   }
                               }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert "dockerfile" in annotations
    assert is_string_type(annotations['dockerfile'])
    assert "repositories" in annotations
    assert is_string_type(annotations['repositories'])
    assert "commit_id" in annotations
    assert is_string_type(annotations['commit_id'])

    assert "base-image-id" in annotations
    assert is_string_type(annotations['base-image-id'])
    assert "base-image-name" in annotations
    assert is_string_type(annotations['base-image-name'])
    assert "parent_images" in annotations
    assert is_string_type(annotations['parent_images'])
    if base_from_scratch:
        # A FROM-scratch build has no base image to report.
        assert annotations["base-image-name"] == ""
        assert annotations["base-image-id"] == ""
        assert '"scratch": "scratch"' in annotations['parent_images']
    else:
        assert annotations["base-image-name"] ==\
               workflow.builder.dockerfile_images.original_base_image
        assert annotations["base-image-id"] != ""
        assert (workflow.builder.dockerfile_images.base_image.to_str()
                in annotations['parent_images'])
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])
    assert "filesystem" in annotations
    assert "fs_data" in annotations['filesystem']

    # The metadata fragment annotations appear only on the koji path.
    if koji:
        assert "metadata_fragment" in annotations
        assert is_string_type(annotations['metadata_fragment'])
        assert "metadata_fragment_key" in annotations
        assert is_string_type(annotations['metadata_fragment_key'])
    else:
        assert "metadata_fragment" not in annotations
        assert "metadata_fragment_key" not in annotations

    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    # Each repo:tag is expected for both registries and both schema
    # versions (v1/v2).
    expected = [{
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }]
    # Same entries in both directions: order-insensitive comparison.
    assert all(digest in expected for digest in digests)
    assert all(digest in digests for digest in expected)

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["durations"]

    if br_annotations:
        assert annotations['br_annotations'] == expected_br_annotations
    else:
        assert 'br_annotations' not in annotations

    if br_labels:
        assert labels['br_labels'] == expected_br_labels
    else:
        assert 'br_labels' not in labels

    if expected_help_results is False:
        assert 'help_file' not in annotations
    else:
        assert json.loads(annotations['help_file']) == expected_help_results

    if expected_media_results:
        media_types = expected_media_results
        assert sorted(json.loads(annotations['media-types'])) == sorted(
            list(set(media_types)))
    else:
        assert 'media-types' not in annotations
def test_metadata_plugin(tmpdir, br_annotations, expected_br_annotations,
                         br_labels, expected_br_labels, koji,
                         help_results, expected_help_results, base_from_scratch,
                         pulp_push_results, expected_pulp_push_results,
                         pulp_pull_results, expected_pulp_pull_results,
                         verify_media_results, expected_media_results,
                         reactor_config_map):
    """Verify StoreMetadataInOSv3Plugin output for a parametrized build.

    Seeds a prepared workflow with results from earlier build phases
    (add_help, rpmqa, pulp push/pull, koji upload, verify-media) plus an
    optional BuildResult carrying extra annotations/labels, runs the
    store_metadata plugin through an ExitPluginsRunner, and asserts that
    every seeded input is reflected in -- or correctly absent from -- the
    produced labels and annotations.
    """
    initial_timestamp = datetime.now()
    workflow = prepare(reactor_config_map=reactor_config_map)
    if base_from_scratch:
        # Multi-stage Dockerfile whose final stage is FROM scratch; the
        # plugin is expected to report empty base-image name/id for it.
        df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla
FROM scratch
RUN yum install -y python"""
        workflow.builder.base_from_scratch = base_from_scratch
    else:
        df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""

    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    # Seed per-phase plugin results that store_metadata reads.
    workflow.prebuild_results = {
        AddHelpPlugin.key: help_results
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
        PLUGIN_PULP_PUSH_KEY: pulp_push_results,
    }
    workflow.exit_results = {
        PulpPullPlugin.key: pulp_pull_results,
        PLUGIN_VERIFY_MEDIA_KEY: verify_media_results,
    }
    workflow.fs_watcher._data = dict(fs_data=None)

    if br_annotations or br_labels:
        # Extra annotations/labels attached to the build result must be
        # passed through into the plugin output verbatim.
        workflow.build_result = BuildResult(
            image_id=INPUT_IMAGE,
            annotations={'br_annotations': br_annotations} if br_annotations else None,
            labels={'br_labels': br_labels} if br_labels else None,
        )

    timestamp = (initial_timestamp + timedelta(seconds=3)).isoformat()
    workflow.plugins_timestamps = {
        PostBuildRPMqaPlugin.key: timestamp,
    }
    workflow.plugins_durations = {
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.plugins_errors = {}

    if koji:
        # koji_upload contributes configmap metadata-fragment annotations;
        # they should only appear in the output when that result exists.
        cm_annotations = {'metadata_fragment_key': 'metadata.json',
                          'metadata_fragment': 'configmap/build-1-md'}
        workflow.postbuild_results[PLUGIN_KOJI_UPLOAD_PLUGIN_KEY] = cm_annotations
        workflow.plugins_timestamps[PLUGIN_KOJI_UPLOAD_PLUGIN_KEY] = timestamp
        workflow.plugins_durations[PLUGIN_KOJI_UPLOAD_PLUGIN_KEY] = 3.03

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    # Every annotation value must already be serialized to a string.
    assert "dockerfile" in annotations
    assert is_string_type(annotations['dockerfile'])
    assert "repositories" in annotations
    assert is_string_type(annotations['repositories'])
    assert "commit_id" in annotations
    assert is_string_type(annotations['commit_id'])

    assert "base-image-id" in annotations
    assert is_string_type(annotations['base-image-id'])
    assert "base-image-name" in annotations
    assert is_string_type(annotations['base-image-name'])
    assert "parent_images" in annotations
    assert is_string_type(annotations['parent_images'])
    if base_from_scratch:
        # FROM scratch builds have no base image to report.
        assert annotations["base-image-name"] == ""
        assert annotations["base-image-id"] == ""
        assert '"scratch": "scratch"' in annotations['parent_images']
    else:
        assert annotations["base-image-name"] == workflow.builder.original_base_image.to_str()
        assert annotations["base-image-id"] != ""
    assert workflow.builder.original_base_image.to_str() in annotations['parent_images']
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])
    assert "filesystem" in annotations
    assert "fs_data" in annotations['filesystem']

    if koji:
        assert "metadata_fragment" in annotations
        assert is_string_type(annotations['metadata_fragment'])
        assert "metadata_fragment_key" in annotations
        assert is_string_type(annotations['metadata_fragment_key'])
    else:
        assert "metadata_fragment" not in annotations
        assert "metadata_fragment_key" not in annotations

    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    # Cross product: {DOCKER0, LOCALHOST} registries x {TEST_IMAGE,
    # namespace/image} repos x {v1 unused-digest, v2 real-digest}.
    expected = [{
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }]
    # Order-insensitive set equality between digests and expected.
    assert all(digest in expected for digest in digests)
    assert all(digest in digests for digest in expected)

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["durations"]

    if br_annotations:
        assert annotations['br_annotations'] == expected_br_annotations
    else:
        assert 'br_annotations' not in annotations

    if br_labels:
        assert labels['br_labels'] == expected_br_labels
    else:
        assert 'br_labels' not in labels

    if expected_help_results is False:
        assert 'help_file' not in annotations
    else:
        assert json.loads(annotations['help_file']) == expected_help_results

    if expected_pulp_push_results is False:
        assert 'v1-image-id' not in annotations
    else:
        assert annotations['v1-image-id'] == expected_pulp_push_results

    # media-types is the union of json (pulp push), pulp pull results and
    # verified media types; absent when none of them produced anything.
    if expected_pulp_pull_results or expected_pulp_push_results or expected_media_results:
        media_types = []
        if expected_pulp_push_results:
            media_types = ['application/json']
        if expected_pulp_pull_results:
            media_types += pulp_pull_results
        if expected_media_results:
            media_types += expected_media_results
        assert sorted(json.loads(annotations['media-types'])) == sorted(list(set(media_types)))
    else:
        assert 'media-types' not in annotations
def test_metadata_plugin_source(image_id, verify_media_results, expected_media_results, workflow):
    """Verify StoreMetadataPlugin annotations for a source-container build.

    Seeds the workflow with a fetch_sources result (and optionally a koji
    source manifest providing the image id), runs the plugin, and checks
    the image-id, digests, plugins-metadata and media-types annotations.
    """
    nvr = 'image_build'
    build_id = '12345'

    fetch_result = {
        'sources_for_koji_build_id': build_id,
        'sources_for_nvr': nvr,
        'image_sources_dir': 'source_dir',
    }

    # Wire up the plugin under test with the seeded prior-plugin results.
    mock_env = MockEnv(workflow)
    mock_env.for_plugin(StoreMetadataPlugin.key)
    mock_env.set_plugin_args({"url": "http://example.com/"})
    mock_env.set_plugin_result(PLUGIN_FETCH_SOURCES_KEY, fetch_result)
    mock_env.set_plugin_result(VerifyMediaTypesPlugin.key, verify_media_results)
    prepare(workflow)
    if image_id:
        workflow.data.koji_source_manifest = {'config': {'digest': image_id}}

    workflow.fs_watcher._data = dict(fs_data=None)

    started = datetime.now()
    finished = (started + timedelta(seconds=3)).isoformat()
    workflow.data.plugins_timestamps = {PLUGIN_FETCH_SOURCES_KEY: finished}
    workflow.data.plugins_durations = {PLUGIN_FETCH_SOURCES_KEY: 3.03}
    workflow.data.plugins_errors = {}

    output = mock_env.create_runner().run()

    assert StoreMetadataPlugin.key in output
    annotations = output[StoreMetadataPlugin.key]["annotations"]
    assert "filesystem" in annotations
    assert "fs_data" in annotations['filesystem']
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])
    # image-id comes from the source manifest digest, or '' without one.
    assert annotations['image-id'] == (image_id if image_id else '')
    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    # Expected digest records: both repositories, each with a v1 entry
    # (unused digest) and a v2 entry (real digest).
    expected = [
        {
            "registry": LOCALHOST_REGISTRY,
            "repository": repo,
            "tag": tag,
            "digest": digest,
            "version": version,
        }
        for repo, tag, digest, version in (
            (TEST_IMAGE, 'latest', DIGEST_NOT_USED, "v1"),
            (TEST_IMAGE, 'latest', DIGEST1, "v2"),
            ("namespace/image", 'asd123', DIGEST_NOT_USED, "v1"),
            ("namespace/image", 'asd123', DIGEST2, "v2"),
        )
    ]
    # Order-insensitive comparison: same records on both sides.
    for record in digests:
        assert record in expected
    for record in expected:
        assert record in digests

    assert "plugins-metadata" in annotations
    for section in ("errors", "durations", "timestamps"):
        assert section in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert PLUGIN_FETCH_SOURCES_KEY in plugins_metadata["durations"]

    if not expected_media_results:
        assert 'media-types' not in annotations
    else:
        reported = sorted(json.loads(annotations['media-types']))
        assert reported == sorted(list(set(expected_media_results)))
Example #16
0
def test_metadata_plugin(tmpdir):
    """Verify StoreMetadataInOSv3Plugin labels, including error metadata.

    Seeds dockerfile/artefact/rpmqa results plus a simulated plugin error
    (distgit_fetch_artefacts), runs the plugin via an ExitPluginsRunner
    and checks the serialized labels, digests and plugins-metadata.
    """
    initial_timestamp = datetime.now()
    workflow = prepare()

    workflow.prebuild_results = {
        CpDockerfilePlugin.key: "dockerfile-content",
        DistgitFetchArtefactsPlugin.key: "artefact1\nartefact2",
    }
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }
    workflow.plugins_timestamps = {
        CpDockerfilePlugin.key:
        initial_timestamp.isoformat(),
        DistgitFetchArtefactsPlugin.key:
        (initial_timestamp + timedelta(seconds=1)).isoformat(),
        PostBuildRPMqaPlugin.key:
        (initial_timestamp + timedelta(seconds=3)).isoformat(),
    }
    workflow.plugins_durations = {
        CpDockerfilePlugin.key: 1.01,
        DistgitFetchArtefactsPlugin.key: 2.02,
        PostBuildRPMqaPlugin.key: 3.03,
    }
    # Simulated failure: must surface under plugins-metadata "errors".
    workflow.plugins_errors = {DistgitFetchArtefactsPlugin.key: 'foo'}

    runner = ExitPluginsRunner(None, workflow,
                               [{
                                   'name': StoreMetadataInOSv3Plugin.key,
                                   "args": {
                                       "url": "http://example.com/"
                                   }
                               }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]
    # Every label value must already be serialized to a string.
    assert "dockerfile" in labels
    assert is_string_type(labels['dockerfile'])
    assert "artefacts" in labels
    assert is_string_type(labels['artefacts'])
    assert "logs" in labels
    assert is_string_type(labels['logs'])
    assert labels['logs'] == ''
    assert "rpm-packages" in labels
    assert is_string_type(labels['rpm-packages'])
    assert labels['rpm-packages'] == ''
    assert "repositories" in labels
    assert is_string_type(labels['repositories'])
    assert "commit_id" in labels
    assert is_string_type(labels['commit_id'])
    assert "base-image-id" in labels
    assert is_string_type(labels['base-image-id'])
    assert "base-image-name" in labels
    assert is_string_type(labels['base-image-name'])
    assert "image-id" in labels
    assert is_string_type(labels['image-id'])

    assert "digests" in labels
    assert is_string_type(labels['digests'])
    digests = json.loads(labels['digests'])
    expected = [{
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
    }]
    # BUG FIX: `digests == reversed(expected)` compared a list to a
    # list_reverseiterator and was therefore always False, so only one
    # ordering was ever accepted. Materialize the reversed list so both
    # orderings genuinely pass.
    assert digests in (expected, list(reversed(expected)))

    assert "plugins-metadata" in labels
    assert "errors" in labels["plugins-metadata"]
    assert "durations" in labels["plugins-metadata"]
    assert "timestamps" in labels["plugins-metadata"]

    plugins_metadata = json.loads(labels["plugins-metadata"])
    assert "distgit_fetch_artefacts" in plugins_metadata["errors"]

    assert "cp_dockerfile" in plugins_metadata["durations"]
    assert "distgit_fetch_artefacts" in plugins_metadata["durations"]
    assert "all_rpm_packages" in plugins_metadata["durations"]
Example #17
0
def test_metadata_plugin_source(image_id, br_annotations,
                                expected_br_annotations, br_labels,
                                expected_br_labels, verify_media_results,
                                expected_media_results):
    """Verify StoreMetadataInOSv3Plugin output for a source-container build.

    Seeds the workflow with a fetch_sources prebuild result, verify-media
    exit result, optional koji source manifest (image id) and optional
    BuildResult annotations/labels, then asserts the plugin's annotations
    and labels reflect each seeded input.
    """
    initial_timestamp = datetime.now()
    workflow = prepare()

    if image_id:
        # The plugin should take image-id from the source manifest digest.
        workflow.koji_source_manifest = {'config': {'digest': image_id}}

    sources_for_nvr = 'image_build'
    sources_for_koji_build_id = '12345'
    workflow.labels['sources_for_koji_build_id'] = sources_for_koji_build_id
    workflow.prebuild_results = {
        PLUGIN_FETCH_SOURCES_KEY: {
            'sources_for_koji_build_id': sources_for_koji_build_id,
            'sources_for_nvr': sources_for_nvr,
            'image_sources_dir': 'source_dir',
        }
    }
    workflow.exit_results = {
        PLUGIN_VERIFY_MEDIA_KEY: verify_media_results,
    }
    workflow.fs_watcher._data = dict(fs_data=None)

    if br_annotations or br_labels:
        workflow.build_result = BuildResult(
            image_id=INPUT_IMAGE,
            annotations={'br_annotations': br_annotations}
            if br_annotations else None,
            labels={'br_labels': br_labels} if br_labels else None,
        )

    timestamp = (initial_timestamp + timedelta(seconds=3)).isoformat()
    workflow.plugins_timestamps = {
        PLUGIN_FETCH_SOURCES_KEY: timestamp,
    }
    workflow.plugins_durations = {
        PLUGIN_FETCH_SOURCES_KEY: 3.03,
    }
    workflow.plugins_errors = {}

    runner = ExitPluginsRunner(None, workflow,
                               [{
                                   'name': StoreMetadataInOSv3Plugin.key,
                                   "args": {
                                       "url": "http://example.com/"
                                   }
                               }])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert "repositories" in annotations
    assert is_string_type(annotations['repositories'])
    assert "filesystem" in annotations
    assert "fs_data" in annotations['filesystem']
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])
    # Empty string when no source manifest supplied the digest.
    assert annotations['image-id'] == (image_id if image_id else '')
    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    # Cross product: {DOCKER0, LOCALHOST} registries x {TEST_IMAGE,
    # namespace/image} repos x {v1 unused-digest, v2 real-digest}.
    expected = [{
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": DOCKER0_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }]
    # Order-insensitive set equality between digests and expected.
    assert all(digest in expected for digest in digests)
    assert all(digest in digests for digest in expected)

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert PLUGIN_FETCH_SOURCES_KEY in plugins_metadata["durations"]

    if br_annotations:
        assert annotations['br_annotations'] == expected_br_annotations
    else:
        assert 'br_annotations' not in annotations

    if br_labels:
        assert labels['br_labels'] == expected_br_labels
    else:
        assert 'br_labels' not in labels
    # The workflow label set before the run must be passed through.
    assert 'sources_for_koji_build_id' in labels
    assert labels['sources_for_koji_build_id'] == sources_for_koji_build_id

    if expected_media_results:
        media_types = expected_media_results
        assert sorted(json.loads(annotations['media-types'])) == sorted(
            list(set(media_types)))
    else:
        assert 'media-types' not in annotations
def test_metadata_plugin(tmpdir, br_annotations, expected_br_annotations,
                         br_labels, expected_br_labels):
    """Verify StoreMetadataInOSv3Plugin annotations for a minimal build.

    Only the rpmqa post-build result is seeded (logs/rpm annotations are
    expected to be empty strings); optional BuildResult annotations and
    labels are parametrized and must appear in -- or be absent from --
    the plugin output.
    """
    initial_timestamp = datetime.now()
    workflow = prepare()
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    workflow.prebuild_results = {}
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
    }

    if br_annotations or br_labels:
        workflow.build_result = BuildResult(
            image_id=INPUT_IMAGE,
            annotations={'br_annotations': br_annotations} if br_annotations else None,
            labels={'br_labels': br_labels} if br_labels else None,
        )

    workflow.plugins_timestamps = {
        PostBuildRPMqaPlugin.key: (initial_timestamp + timedelta(seconds=3)).isoformat(),
    }
    workflow.plugins_durations = {
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.plugins_errors = {}

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    # Every annotation value must already be serialized to a string.
    assert "dockerfile" in annotations
    assert is_string_type(annotations['dockerfile'])
    assert "logs" in annotations
    assert is_string_type(annotations['logs'])
    assert annotations['logs'] == ''
    assert "rpm-packages" in annotations
    assert is_string_type(annotations['rpm-packages'])
    assert annotations['rpm-packages'] == ''
    assert "repositories" in annotations
    assert is_string_type(annotations['repositories'])
    assert "commit_id" in annotations
    assert is_string_type(annotations['commit_id'])
    assert "base-image-id" in annotations
    assert is_string_type(annotations['base-image-id'])
    assert "base-image-name" in annotations
    assert is_string_type(annotations['base-image-name'])
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])

    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    expected = [{
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
    }]
    # BUG FIX: `digests == reversed(expected)` compared a list to a
    # list_reverseiterator and was therefore always False, so only one
    # ordering was ever accepted. Materialize the reversed list so both
    # orderings genuinely pass.
    assert digests in (expected, list(reversed(expected)))

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["durations"]

    if br_annotations:
        assert annotations['br_annotations'] == expected_br_annotations
    else:
        assert 'br_annotations' not in annotations

    if br_labels:
        assert labels['br_labels'] == expected_br_labels
    else:
        assert 'br_labels' not in labels
def test_metadata_plugin(workflow, source_dir, help_results,
                         expected_help_results, base_from_scratch,
                         verify_media_results, expected_media_results):
    """Verify StoreMetadataPlugin annotations for a regular image build.

    Seeds dockerfile images (optionally FROM scratch), add_help, rpmqa
    and verify-media results, runs the plugin and checks the produced
    annotations: dockerfile, base/parent image info, digests, timings,
    help_file and media-types.
    """
    initial_timestamp = datetime.now()
    prepare(workflow)
    if base_from_scratch:
        # Multi-stage Dockerfile whose final stage is FROM scratch; the
        # plugin is expected to report empty base-image name/id for it.
        df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla
FROM scratch
RUN yum install -y python"""
    else:
        df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""

    df = df_parser(str(source_dir))
    df.content = df_content
    workflow.data.dockerfile_images = DockerfileImages(df.parent_images)
    for parent in df.parent_images:
        if parent != 'scratch':
            # Give every real parent image a resolved digest.
            workflow.data.dockerfile_images[parent] = "sha256:spamneggs"
    flexmock(workflow, df_path=df.dockerfile_path)
    workflow.df_dir = str(source_dir)

    workflow.data.prebuild_results = {AddHelpPlugin.key: help_results}

    if help_results is not None:
        workflow.data.annotations['help_file'] = help_results['help_file']

    # BUG FIX: this dict was previously assigned twice in a row, so the
    # second literal silently discarded the rpmqa result. Seed both
    # results in a single assignment instead.
    workflow.data.postbuild_results = {
        PostBuildRPMqaPlugin.key: "rpm1\nrpm2",
        PLUGIN_VERIFY_MEDIA_KEY: verify_media_results,
    }
    workflow.fs_watcher._data = dict(fs_data=None)

    timestamp = (initial_timestamp + timedelta(seconds=3)).isoformat()
    workflow.data.plugins_timestamps = {
        PostBuildRPMqaPlugin.key: timestamp,
    }
    workflow.data.plugins_durations = {
        PostBuildRPMqaPlugin.key: 3.03,
    }
    workflow.data.plugins_errors = {}

    runner = ExitPluginsRunner(workflow, [{
        'name': StoreMetadataPlugin.key,
        "args": {
            "url": "http://example.com/"
        }
    }])
    output = runner.run()
    assert StoreMetadataPlugin.key in output
    annotations = output[StoreMetadataPlugin.key]["annotations"]
    # Every annotation value must already be serialized to a string.
    assert "dockerfile" in annotations
    assert is_string_type(annotations['dockerfile'])
    assert "commit_id" in annotations
    assert is_string_type(annotations['commit_id'])

    assert "base-image-id" in annotations
    assert is_string_type(annotations['base-image-id'])
    assert "base-image-name" in annotations
    assert is_string_type(annotations['base-image-name'])
    assert "parent_images" in annotations
    assert is_string_type(annotations['parent_images'])
    if base_from_scratch:
        # FROM scratch builds have no base image to report.
        assert annotations["base-image-name"] == ""
        assert annotations["base-image-id"] == ""
        assert '"scratch": "scratch"' in annotations['parent_images']
    else:
        assert annotations["base-image-name"] ==\
               workflow.data.dockerfile_images.original_base_image
        assert annotations["base-image-id"] != ""

        assert (workflow.data.dockerfile_images.base_image.to_str()
                in annotations['parent_images'])
    assert "image-id" in annotations
    assert is_string_type(annotations['image-id'])
    assert "filesystem" in annotations
    assert "fs_data" in annotations['filesystem']

    assert "digests" in annotations
    assert is_string_type(annotations['digests'])
    digests = json.loads(annotations['digests'])
    # Both repositories, each with a v1 (unused digest) and v2 entry.
    expected = [{
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": TEST_IMAGE,
        "tag": 'latest',
        "digest": DIGEST1,
        "version": "v2"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST_NOT_USED,
        "version": "v1"
    }, {
        "registry": LOCALHOST_REGISTRY,
        "repository": "namespace/image",
        "tag": 'asd123',
        "digest": DIGEST2,
        "version": "v2"
    }]
    # Order-insensitive set equality between digests and expected.
    assert all(digest in expected for digest in digests)
    assert all(digest in digests for digest in expected)

    assert "plugins-metadata" in annotations
    assert "errors" in annotations["plugins-metadata"]
    assert "durations" in annotations["plugins-metadata"]
    assert "timestamps" in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["durations"]

    if expected_help_results is False:
        assert 'help_file' not in annotations
    else:
        assert json.loads(annotations['help_file']) == expected_help_results

    if expected_media_results:
        media_types = expected_media_results
        assert sorted(json.loads(annotations['media-types'])) == sorted(
            list(set(media_types)))
    else:
        assert 'media-types' not in annotations