Example #1
def test_parse_dockerfile_again_after_data_is_loaded(context_dir, build_dir,
                                                     tmpdir):
    context_dir = ContextDir(Path(tmpdir.join("context_dir")))
    wf_data = ImageBuildWorkflowData.load_from_dir(context_dir)
    # Note that the source argument is None, which causes a DummySource to be
    # created and "FROM scratch" to be included in the Dockerfile.
    workflow = DockerBuildWorkflow(context_dir, build_dir, NAMESPACE,
                                   PIPELINE_RUN_NAME, wf_data)
    assert ["scratch"] == workflow.data.dockerfile_images.original_parents

    # Now, save the workflow data and load it again
    wf_data.save(context_dir)

    another_source = DummySource("git", "https://git.host/")
    dfp = DockerfileParser(another_source.source_path)
    dfp.content = 'FROM fedora:35\nCMD ["bash", "--version"]'

    wf_data = ImageBuildWorkflowData.load_from_dir(context_dir)
    flexmock(DockerBuildWorkflow).should_receive(
        "_parse_dockerfile_images").never()
    flexmock(wf_data.dockerfile_images).should_receive(
        "set_source_registry").never()
    workflow = DockerBuildWorkflow(context_dir,
                                   build_dir,
                                   NAMESPACE,
                                   PIPELINE_RUN_NAME,
                                   wf_data,
                                   source=another_source)
    assert ["scratch"] == workflow.data.dockerfile_images.original_parents, \
        "The dockerfile_images should not be changed."
Example #2
    def test_save_and_load(self, tmpdir):
        """Test save workflow data and then load them back properly."""
        tag_conf = TagConf()
        tag_conf.add_floating_image(ImageName.parse("registry/image:latest"))
        tag_conf.add_primary_image(ImageName.parse("registry/image:1.0"))

        wf_data = ImageBuildWorkflowData(
            dockerfile_images=DockerfileImages(["scratch", "registry/f:35"]),
            # Test that objects in dict values are serialized
            tag_conf=tag_conf,
            plugins_results={
                "plugin_a": {
                    'parent-images-koji-builds': {
                        ImageName(repo='base', tag='latest').to_str(): {
                            'id': 123456789,
                            'nvr': 'base-image-1.0-99',
                            'state': 1,
                        },
                    },
                },
                "tag_and_push": [
                # Such an object inside a list should be handled properly.
                    ImageName(registry="localhost:5000",
                              repo='image',
                              tag='latest'),
                ],
                "image_build": {
                    "logs": ["Build succeeds."]
                },
            },
            koji_upload_files=[
                {
                    "local_filename": "/path/to/build1.log",
                    "dest_filename": "x86_64-build.log",
                },
                {
                    "local_filename": "/path/to/dir1/remote-source.tar.gz",
                    "dest_filename": "remote-source.tar.gz",
                },
            ])

        context_dir = ContextDir(Path(tmpdir.join("context_dir").mkdir()))
        wf_data.save(context_dir)

        assert context_dir.workflow_json.exists()

        # Verify the saved data matches the schema
        saved_data = json.loads(context_dir.workflow_json.read_bytes())
        try:
            validate_with_schema(saved_data, "schemas/workflow_data.json")
        except osbs.exceptions.OsbsValidationException as e:
            pytest.fail(
                f"The dumped workflow data does not match JSON schema: {e}")

        # Load the saved data back and verify it
        loaded_wf_data = ImageBuildWorkflowData.load_from_dir(context_dir)

        assert wf_data.dockerfile_images == loaded_wf_data.dockerfile_images
        assert wf_data.tag_conf == loaded_wf_data.tag_conf
        assert wf_data.plugins_results == loaded_wf_data.plugins_results
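
The round trip above only works if objects such as ImageName nested inside dicts and lists can be serialized to JSON. Here is a minimal sketch of how an encoder in the spirit of the WorkflowDataEncoder seen later can do that, assuming the objects expose a to_str() method; the Image class below is a hypothetical stand-in, not part of atomic-reactor.

import json


class Image:
    """Hypothetical stand-in for an object like ImageName."""

    def __init__(self, name):
        self.name = name

    def to_str(self):
        return self.name


class ToStrEncoder(json.JSONEncoder):
    def default(self, o):
        # Serialize anything that exposes to_str(); defer to the base class otherwise.
        if hasattr(o, "to_str"):
            return o.to_str()
        return super().default(o)


print(json.dumps({"tag_and_push": [Image("localhost:5000/image:latest")]},
                 cls=ToStrEncoder))
# -> {"tag_and_push": ["localhost:5000/image:latest"]}
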
Example #3
    def test_load_from_empty_dump(self):
        wf_data = ImageBuildWorkflowData.load({})
        empty_data = ImageBuildWorkflowData()
        field: Field
        for field in fields(ImageBuildWorkflowData):
            name = field.name
            assert getattr(empty_data, name) == getattr(wf_data, name)
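
The comparison above walks the declared fields with dataclasses.fields() instead of hard-coding attribute names. A standalone sketch of the same pattern, using a hypothetical dataclass:

from dataclasses import dataclass, field, fields


@dataclass
class Data:
    """Hypothetical dataclass; stands in for ImageBuildWorkflowData."""
    plugins_results: dict = field(default_factory=dict)
    koji_upload_files: list = field(default_factory=list)


a, b = Data(), Data()
for f in fields(Data):
    # Compare every declared field of the two instances by name.
    assert getattr(a, f.name) == getattr(b, f.name)
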
Example #4
def test_ensure_workflow_data_is_saved_in_various_conditions(
        build_result, build_dir, dummy_source, tmpdir):
    context_dir = tmpdir.join("context_dir").mkdir()
    params = TaskParams(build_dir=str(build_dir),
                        config_file="config.yaml",
                        context_dir=str(context_dir),
                        namespace="test-namespace",
                        pipeline_run_name='test-pipeline-run',
                        user_params={})
    (flexmock(params).should_receive("source").and_return(dummy_source))

    task = plugin_based.PluginBasedTask(params)

    if build_result == "normal_return":
        (flexmock(plugin_based.inner.DockerBuildWorkflow).should_receive(
            "build_docker_image").once())

        task.execute()

    elif build_result == "error_raised":
        (flexmock(plugin_based.inner.DockerBuildWorkflow).should_receive(
            "build_docker_image").and_raise(BuildCanceledException))

        with pytest.raises(BuildCanceledException):
            task.execute()

    elif build_result == "terminated":
        # Start task.execute in a separate process and terminate it.
        # This simulates cancellation via the TERM signal.

        def _build_docker_image(self, *args, **kwargs):
            def _cancel_build(*args, **kwargs):
                raise BuildCanceledException()

            signal.signal(signal.SIGTERM, _cancel_build)
            # The sleep duration doesn't matter; it just means the build is running.
            time.sleep(5)

        (flexmock(plugin_based.inner.DockerBuildWorkflow).should_receive(
            "build_docker_image").replace_with(_build_docker_image))

        proc = multiprocessing.Process(target=task.execute)
        proc.start()

        # Wait a short while for task.execute to start running in the separate process.
        time.sleep(0.3)
        proc.terminate()

    assert context_dir.join("workflow.json").exists()

    wf_data = ImageBuildWorkflowData()
    wf_data.load_from_dir(ContextDir(Path(context_dir)))
    # As long as the data is loaded successfully, just check a few
    # attributes to verify it.
    assert DockerfileImages() == wf_data.dockerfile_images
    assert {} == wf_data.prebuild_results
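
In the "terminated" branch, cancellation is simulated by installing a SIGTERM handler that raises, running the task in a child process, and terminating that process. Below is a reduced sketch of just that mechanism, with no atomic-reactor types involved (on POSIX, Process.terminate() delivers SIGTERM).

import multiprocessing
import signal
import time


def long_running_task():
    def _cancel(signum, frame):
        raise KeyboardInterrupt("cancelled by TERM")

    signal.signal(signal.SIGTERM, _cancel)
    try:
        time.sleep(5)  # stands in for the real build
    except KeyboardInterrupt:
        print("cleanup (e.g. saving workflow data) runs here")


if __name__ == "__main__":
    proc = multiprocessing.Process(target=long_running_task)
    proc.start()
    time.sleep(0.3)    # give the child time to install the handler
    proc.terminate()   # sends SIGTERM to the child on POSIX
    proc.join()
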
Example #5
    def test_load_from_empty_directory(self, tmpdir):
        context_dir = tmpdir.join("context_dir").mkdir()
        # Note: no data file is created here, e.g. workflow.json.
        wf_data = ImageBuildWorkflowData.load_from_dir(ContextDir(context_dir))
        assert wf_data.dockerfile_images.is_empty
        assert wf_data.tag_conf.is_empty
        assert {} == wf_data.plugins_results
Example #6
    def task_with_mocked_deps(self, monkeypatch, context_dir, build_dir, dummy_source, tmpdir):
        """Create a PluginBasedTask instance with mocked task parameters.

        Mock DockerBuildWorkflow accordingly. Return the mocked workflow instance for further
        customization in individual tests.
        """
        task_params = TaskParams(build_dir=build_dir,
                                 config_file="config.yaml",
                                 context_dir=str(context_dir),
                                 namespace="test-namespace",
                                 pipeline_run_name='test-pipeline-run',
                                 user_params={"a": "b"})

        expect_source = dummy_source
        (flexmock(task_params)
         .should_receive("source")
         .and_return(expect_source))

        expect_plugins = []
        monkeypatch.setattr(plugin_based.PluginBasedTask, "plugins_conf", expect_plugins)

        root_build_dir = RootBuildDir(build_dir)

        # Helps verify that the RootBuildDir object is passed to the workflow object.
        (flexmock(plugin_based.PluginBasedTask)
         .should_receive("get_build_dir")
         .and_return(root_build_dir))

        # The test methods in this test case do not involve the workflow
        # data. Because ImageBuildWorkflowData is a dataclass, flexmock can
        # assert that the workflow data object created during task execution
        # equals this one, i.e. both are workflow data objects holding the
        # same data.
        wf_data = ImageBuildWorkflowData()

        mocked_workflow = flexmock(inner.DockerBuildWorkflow)
        (
            mocked_workflow
            .should_call("__init__")
            .once()
            .with_args(
                context_dir=ContextDir,
                build_dir=root_build_dir,
                data=wf_data,
                namespace="test-namespace",
                pipeline_run_name='test-pipeline-run',
                source=expect_source,
                plugins_conf=expect_plugins,
                user_params={"a": "b"},
                reactor_config_path="config.yaml",
                keep_plugins_running=False,
            )
        )
        mocked_workflow.should_receive("build_docker_image").and_raise(
            AssertionError("you must mock the build_docker_image() workflow method")
        )

        task = plugin_based.PluginBasedTask(task_params)
        return task, mocked_workflow
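
The with_args(..., data=wf_data, ...) expectation above can match by value because ImageBuildWorkflowData is a dataclass: two separately constructed instances with equal field values compare equal. A tiny illustration with a hypothetical dataclass:

from dataclasses import dataclass, field


@dataclass
class WorkflowData:
    """Hypothetical dataclass; stands in for ImageBuildWorkflowData."""
    plugins_results: dict = field(default_factory=dict)


assert WorkflowData() == WorkflowData()        # equal by value
assert WorkflowData() is not WorkflowData()    # yet distinct objects
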
Example #7
    def test_load_invalid_data_from_directory(self, data_path, prop_name,
                                              wrong_value, tmpdir):
        """Test the workflow data is validated by JSON schema when reading from context_dir."""
        context_dir = ContextDir(Path(tmpdir.join("context_dir").mkdir()))

        data = ImageBuildWorkflowData(
            dockerfile_images=DockerfileImages(["scratch"]))
        data.tag_conf.add_floating_image("registry/httpd:2.4")
        data.plugins_results["plugin_1"] = "result"
        data.save(context_dir)

        saved_data = json.loads(context_dir.workflow_json.read_bytes())
        # Make data invalid
        graceful_chain_get(saved_data, *data_path,
                           make_copy=False)[prop_name] = wrong_value
        context_dir.workflow_json.write_text(json.dumps(saved_data),
                                             encoding="utf-8")

        with pytest.raises(osbs.exceptions.OsbsValidationException):
            ImageBuildWorkflowData.load_from_dir(context_dir)
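
The point of this test: load_from_dir() validates workflow.json against a JSON schema before deserializing (via osbs' validate_with_schema in this project). Purely as an illustration of that kind of check, the same idea can be expressed with the jsonschema package; the schema fragment below is made up, not the project's schemas/workflow_data.json.

import jsonschema

# Made-up schema fragment for illustration only.
schema = {
    "type": "object",
    "properties": {
        "plugins_results": {"type": "object"},
    },
    "required": ["plugins_results"],
}

jsonschema.validate({"plugins_results": {"plugin_1": "result"}}, schema)  # passes
try:
    jsonschema.validate({"plugins_results": "oops"}, schema)  # wrong type
except jsonschema.ValidationError as exc:
    print(f"invalid workflow data: {exc.message}")
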
Example #8
    def test_load_from_dump(self):
        input_data = {
            "dockerfile_images": {
                "original_parents": ["scratch"],
                "local_parents": [],
                "source_registry": None,
                "organization": None,
            },
            "plugins_results": {
                "plugin_1": "result"
            },
            "tag_conf": {
                "floating_images": [
                    ImageName.parse("registry/httpd:2.4").to_str(),
                ],
            },
        }
        wf_data = ImageBuildWorkflowData.load(input_data)

        expected_df_images = DockerfileImages.load(
            input_data["dockerfile_images"])
        assert expected_df_images == wf_data.dockerfile_images
        assert input_data["plugins_results"] == wf_data.plugins_results
        assert TagConf.load(input_data["tag_conf"]) == wf_data.tag_conf
Example #9
def test_workflow_data_is_restored_before_starting_to_build(build_dir, dummy_source, tmpdir):
    context_dir = tmpdir.join("context_dir").mkdir()

    # Write workflow data as it was saved by a previous task
    data = ImageBuildWorkflowData()
    # Note: for this test, dockerfile_images can't be passed directly as a
    # kwarg to ImageBuildWorkflowData because ImageBuildWorkflowData is
    # flexmocked in the fixture; doing so raises
    # "TypeError: object.__new__() takes exactly one argument (the type to instantiate)".
    # The root cause is still unknown.
    data.dockerfile_images = DockerfileImages(["scratch"])
    data.tag_conf.add_floating_image("registry/app:latest")
    data.plugins_results["plugin_a"] = {"var": "value"}
    data.save(ContextDir(Path(context_dir)))

    params = TaskParams(build_dir=str(build_dir),
                        config_file="config.yaml",
                        context_dir=str(context_dir),
                        namespace="test-namespace",
                        pipeline_run_name='test-pipeline-run',
                        user_params={})
    (flexmock(params)
     .should_receive("source")
     .and_return(dummy_source))

    task = plugin_based.PluginBasedTask(params)

    class _FakeDockerBuildWorkflow:
        def __init__(self, build_dir, data=None, **kwargs):
            self.data = data

        def build_docker_image(self):
            assert DockerfileImages(["scratch"]) == self.data.dockerfile_images

    (flexmock(plugin_based.inner)
     .should_receive("DockerBuildWorkflow")
     .replace_with(_FakeDockerBuildWorkflow))

    task.execute()
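
The fake class is swapped in with flexmock's replace_with(), which substitutes the attribute looked up on the module (plugin_based.inner.DockerBuildWorkflow) for the duration of the test. A minimal sketch of the same idiom against a standard-library module:

import random

from flexmock import flexmock


def test_choice_is_replaced():
    # Replace random.choice with a deterministic fake for this test only;
    # flexmock restores the original at teardown.
    flexmock(random).should_receive("choice").replace_with(lambda seq: seq[0])
    assert random.choice(["a", "b", "c"]) == "a"
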
Example #10
    def test_creation(self):
        data = ImageBuildWorkflowData()
        assert data.dockerfile_images.is_empty
        assert data.tag_conf.is_empty
        assert {} == data.plugins_results
Example #11
def get_workflow_data_json():
    tag_conf = TagConf()
    tag_conf.add_floating_image(ImageName.parse("registry/image:latest"))
    tag_conf.add_primary_image(ImageName.parse("registry/image:1.0"))

    wf_data = ImageBuildWorkflowData(
        dockerfile_images=DockerfileImages(["scratch", "registry/f:35"]),
        # Test that objects in dict values are serialized
        plugins_results={
            "image_build": {
                "logs": ["Build succeeds."]
            },
            "tag_and_push": [
            # Such an object inside a list should be handled properly.
                ImageName(registry="localhost:5000",
                          repo='image',
                          tag='latest'),
            ],
            "plugin_a": {
                'parent-images-koji-builds': {
                    ImageName(repo='base', tag='latest').to_str(): {
                        'id': 123456789,
                        'nvr': 'base-image-1.0-99',
                        'state': 1,
                    },
                },
            },
        },
        tag_conf=tag_conf,
        koji_upload_files=[
            {
                "local_filename": "/path/to/build1.log",
                "dest_filename": "x86_64-build.log",
            },
            {
                "local_filename": "/path/to/dir1/remote-source.tar.gz",
                "dest_filename": "remote-source.tar.gz",
            },
        ])

    wf_data.image_components = {
        'x86_64': [{
            'type': 'rpm',
            'name': 'python-docker-py',
            'version': '1.3.1',
            'release': '1.fc24',
            'arch': 'noarch',
            'sigmd5': '7c1f60d8cde73e97a45e0c489f4a3b26',
            'signature': None,
            'epoch': None
        }, {
            'type': 'rpm',
            'name': 'fedora-repos-rawhide',
            'version': '24',
            'release': '0.1',
            'arch': 'noarch',
            'sigmd5': 'd41df1e059544d906363605d47477e60',
            'signature': None,
            'epoch': None
        }, {
            'type': 'rpm',
            'name': 'gpg-pubkey-doc',
            'version': '1.0',
            'release': '1',
            'arch': 'noarch',
            'sigmd5': '00000000000000000000000000000000',
            'signature': None,
            'epoch': None
        }],
        'ppc64le': [{
            'type': 'rpm',
            'name': 'python-docker-py',
            'version': '1.3.1',
            'release': '1.fc24',
            'arch': 'noarch',
            'sigmd5': '7c1f60d8cde73e97a45e0c489f4a3b26',
            'signature': None,
            'epoch': None
        }, {
            'type': 'rpm',
            'name': 'fedora-repos-rawhide',
            'version': '24',
            'release': '0.1',
            'arch': 'noarch',
            'sigmd5': 'd41df1e059544d906363605d47477e60',
            'signature': None,
            'epoch': None
        }, {
            'type': 'rpm',
            'name': 'gpg-pubkey-doc',
            'version': '1.0',
            'release': '1',
            'arch': 'noarch',
            'sigmd5': '00000000000000000000000000000000',
            'signature': None,
            'epoch': None
        }],
    }

    with TemporaryDirectory() as d:
        with open(os.path.join(d, 'workflow_data.json'), 'w') as f:
            json.dump(wf_data.as_dict(), f, cls=WorkflowDataEncoder)
        with open(os.path.join(d, 'workflow_data.json')) as f:
            workflow_json = json.load(f)

    return workflow_json
Example #12
    def workflow(self,
                 build_process_failed=False,
                 registries=None,
                 registry_types=None,
                 platforms=None,
                 platform_descriptors=None,
                 group=True,
                 fail=False,
                 limit_media_types=None):
        tag_conf = TagConf()
        tag_conf.add_unique_image(self.TEST_UNIQUE_IMAGE)

        if platform_descriptors is None:
            platform_descriptors = [
                {
                    'platform': 'x86_64',
                    'architecture': 'amd64'
                },
                {
                    'platform': 'ppc64le',
                    'architecture': 'ppc64le'
                },
                {
                    'platform': 's390x',
                    'architecture': 's390x'
                },
            ]

        if platforms is None:
            platforms = [
                descriptor['platform'] for descriptor in platform_descriptors
            ]
        no_amd64 = 'x86_64' not in platforms

        keep_types = False
        if registries or registry_types:
            keep_types = True

        if registries is None and registry_types is None:
            registry_types = [
                MEDIA_TYPE_DOCKER_V2_SCHEMA1, MEDIA_TYPE_DOCKER_V2_SCHEMA2,
                MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST, MEDIA_TYPE_OCI_V1,
                MEDIA_TYPE_OCI_V1_INDEX
            ]

        if registries is None:
            registries = [{
                'url': 'https://container-registry.example.com/v2',
                'version': 'v2',
                'insecure': True,
                'expected_media_types': registry_types
            }]
        conf = {
            'version': 1,
            'registries': registries,
        }

        if limit_media_types is not None:
            conf['source_container'] = {
                'limit_media_types': limit_media_types,
            }

        if platform_descriptors:
            conf['platform_descriptors'] = platform_descriptors

        for registry in registries:

            def get_manifest(request):
                media_types = request.headers.get('Accept', '').split(',')
                content_type = media_types[0]

                return 200, {'Content-Type': content_type}, '{}'

            url_regex = "r'" + registry['url'] + ".*/manifests/.*'"
            url = re.compile(url_regex)
            responses.add_callback(responses.GET, url, callback=get_manifest)

            expected_types = registry.get('expected_media_types',
                                          registry_types or [])
            if fail == "bad_results":
                response_types = []
            elif not keep_types and no_amd64:
                response_types = [MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST]
            else:
                response_types = expected_types

            reguri = RegistryURI(registry['url']).docker_uri
            if re.match('http(s)?://', reguri):
                urlbase = reguri
            else:
                urlbase = 'https://{0}'.format(reguri)

            actual_v2_url = urlbase + "/v2/foo/manifests/unique-tag"

            if fail == "bad_results":
                response = requests.Response()
                (flexmock(response,
                          raise_for_status=lambda: None,
                          status_code=requests.codes.ok,
                          json={},
                          headers={'Content-Type': 'application/json'}))
                v1_response = response
                v1_oci_response = response
                v1_oci_index_response = response
                v2_response = response
                v2_list_response = response
            else:
                v1_response = self.config_response_none
                v1_oci_response = self.config_response_none
                v1_oci_index_response = self.config_response_none
                v2_response = self.config_response_none
                v2_list_response = self.config_response_none

            if MEDIA_TYPE_DOCKER_V2_SCHEMA1 in response_types:
                v1_response = self.config_response_config_v1
            if MEDIA_TYPE_DOCKER_V2_SCHEMA2 in response_types:
                v2_response = self.config_response_config_v2
            if MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST in response_types:
                v2_list_response = self.config_response_config_v2_list
            if MEDIA_TYPE_OCI_V1 in response_types:
                v1_oci_response = self.config_response_config_oci_v1
            if MEDIA_TYPE_OCI_V1_INDEX in response_types:
                v1_oci_index_response = self.config_response_config_oci_v1_index

            v2_header_v1 = {'Accept': MEDIA_TYPE_DOCKER_V2_SCHEMA1}
            v2_header_v2 = {'Accept': MEDIA_TYPE_DOCKER_V2_SCHEMA2}
            manifest_header = {'Accept': MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST}

            (flexmock(requests.Session).should_receive('get').with_args(
                actual_v2_url,
                headers=v2_header_v1,
                auth=HTTPRegistryAuth,
                verify=False).and_return(v1_response))
            (flexmock(requests.Session).should_receive('get').with_args(
                actual_v2_url,
                headers=v2_header_v2,
                auth=HTTPRegistryAuth,
                verify=False).and_return(v2_response))
            (flexmock(requests.Session).should_receive('get').with_args(
                actual_v2_url,
                headers={
                    'Accept': MEDIA_TYPE_OCI_V1
                },
                auth=HTTPRegistryAuth,
                verify=False).and_return(v1_oci_response))
            (flexmock(requests.Session).should_receive('get').with_args(
                actual_v2_url,
                headers={
                    'Accept': MEDIA_TYPE_OCI_V1_INDEX
                },
                auth=HTTPRegistryAuth,
                verify=False).and_return(v1_oci_index_response))
            (flexmock(requests.Session).should_receive('get').with_args(
                actual_v2_url,
                headers=manifest_header,
                auth=HTTPRegistryAuth,
                verify=False).and_return(v2_list_response))

        digests = {'media_type': MEDIA_TYPE_DOCKER_V2_MANIFEST_LIST}
        if not group:
            digests = {'media_type': MEDIA_TYPE_DOCKER_V2_SCHEMA2}
        plugins_results = {
            PLUGIN_CHECK_AND_SET_PLATFORMS_KEY: platforms,
            PLUGIN_GROUP_MANIFESTS_KEY: digests,
        }

        mock_get_retry_session()
        builder = flexmock()
        setattr(builder, 'image_id', 'sha256:(old)')

        flexmock(tag_conf=tag_conf)
        wf_data = ImageBuildWorkflowData()
        wf_data.tag_conf = tag_conf
        wf_data.plugins_results = plugins_results

        return flexmock(data=wf_data,
                        builder=builder,
                        conf=Configuration(raw_config=conf),
                        build_process_failed=build_process_failed)
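
A note on the registry mock above: the get_manifest callback echoes the first Accept media type back as the Content-Type, which is how the code under test discovers which media types a registry supports. Below is a stripped-down sketch of that technique with the responses library; the registry URL is made up.

import re

import requests
import responses


@responses.activate
def check_content_negotiation():
    def get_manifest(request):
        # Echo the first requested media type back as the Content-Type.
        content_type = request.headers.get('Accept', '').split(',')[0]
        return 200, {'Content-Type': content_type}, '{}'

    responses.add_callback(
        responses.GET,
        re.compile(r'https://registry\.example\.com/v2/.*/manifests/.*'),
        callback=get_manifest)

    resp = requests.get(
        'https://registry.example.com/v2/foo/manifests/latest',
        headers={'Accept': 'application/vnd.oci.image.manifest.v1+json'})
    assert resp.headers['Content-Type'] == 'application/vnd.oci.image.manifest.v1+json'


check_content_negotiation()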