Example #1
    def test_save_and_load(self, tmpdir):
        """Test save workflow data and then load them back properly."""
        tag_conf = TagConf()
        tag_conf.add_floating_image(ImageName.parse("registry/image:latest"))
        tag_conf.add_primary_image(ImageName.parse("registry/image:1.0"))

        wf_data = ImageBuildWorkflowData(
            dockerfile_images=DockerfileImages(["scratch", "registry/f:35"]),
            # Test that an object nested inside dict values is serialized
            tag_conf=tag_conf,
            plugins_results={
                "plugin_a": {
                    'parent-images-koji-builds': {
                        ImageName(repo='base', tag='latest').to_str(): {
                            'id': 123456789,
                            'nvr': 'base-image-1.0-99',
                            'state': 1,
                        },
                    },
                },
                "tag_and_push": [
                    # Such an object inside a list should be handled properly.
                    ImageName(registry="localhost:5000",
                              repo='image',
                              tag='latest'),
                ],
                "image_build": {
                    "logs": ["Build succeeds."]
                },
            },
            koji_upload_files=[
                {
                    "local_filename": "/path/to/build1.log",
                    "dest_filename": "x86_64-build.log",
                },
                {
                    "local_filename": "/path/to/dir1/remote-source.tar.gz",
                    "dest_filename": "remote-source.tar.gz",
                },
            ])

        context_dir = ContextDir(Path(tmpdir.join("context_dir").mkdir()))
        wf_data.save(context_dir)

        assert context_dir.workflow_json.exists()

        # Verify the saved data matches the schema
        saved_data = json.loads(context_dir.workflow_json.read_bytes())
        try:
            validate_with_schema(saved_data, "schemas/workflow_data.json")
        except osbs.exceptions.OsbsValidationException as e:
            pytest.fail(
                f"The dumped workflow data does not match JSON schema: {e}")

        # Load the data back and verify it matches the original
        loaded_wf_data = ImageBuildWorkflowData.load_from_dir(context_dir)

        assert wf_data.dockerfile_images == loaded_wf_data.dockerfile_images
        assert wf_data.tag_conf == loaded_wf_data.tag_conf
        assert wf_data.plugins_results == loaded_wf_data.plugins_results
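For reference, the save-and-load round trip exercised above reduces to a few calls. A minimal sketch, assuming the same imports as the test module (ImageBuildWorkflowData, ContextDir, DockerfileImages); the temporary directory and variable names are illustrative:

import tempfile
from pathlib import Path

# A throwaway directory stands in for a real pipeline context_dir.
ctx = ContextDir(Path(tempfile.mkdtemp()))

wf = ImageBuildWorkflowData(dockerfile_images=DockerfileImages(["scratch"]))
wf.save(ctx)  # persists the data as JSON under ctx.workflow_json

restored = ImageBuildWorkflowData.load_from_dir(ctx)
assert restored.dockerfile_images == wf.dockerfile_images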
Example #2
    def test_load_invalid_data_from_directory(self, data_path, prop_name,
                                              wrong_value, tmpdir):
        """Test the workflow data is validated by JSON schema when reading from context_dir."""
        context_dir = ContextDir(Path(tmpdir.join("context_dir").mkdir()))

        data = ImageBuildWorkflowData(
            dockerfile_images=DockerfileImages(["scratch"]))
        data.tag_conf.add_floating_image("registry/httpd:2.4")
        data.plugins_results["plugin_1"] = "result"
        data.save(context_dir)

        saved_data = json.loads(context_dir.workflow_json.read_bytes())
        # Make data invalid
        graceful_chain_get(saved_data, *data_path,
                           make_copy=False)[prop_name] = wrong_value
        context_dir.workflow_json.write_text(json.dumps(saved_data),
                                             encoding="utf-8")

        with pytest.raises(osbs.exceptions.OsbsValidationException):
            ImageBuildWorkflowData.load_from_dir(context_dir)
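The corruption step works because graceful_chain_get with make_copy=False returns the live nested object rather than a copy, so the indexed assignment mutates saved_data in place before it is written back. A minimal stand-in sketch of that behavior (chain_get is a hypothetical simplification, not the real helper):

def chain_get(d, *path):
    # Walk the key path without copying, returning the live nested object.
    for key in path:
        d = d[key]
    return d

doc = {"tag_conf": {"floating_images": ["registry/httpd:2.4"]}}
# Assigning through the returned reference corrupts the original document,
# which is exactly what the test needs before re-running schema validation.
chain_get(doc, "tag_conf")["floating_images"] = "not-a-list"
assert doc["tag_conf"]["floating_images"] == "not-a-list"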
Example #3
def test_workflow_data_is_restored_before_starting_to_build(build_dir, dummy_source, tmpdir):
    context_dir = tmpdir.join("context_dir").mkdir()

    # Write workflow data as it was saved by a previous task
    data = ImageBuildWorkflowData()
    # Note: for this test, dockerfile_images can't be passed as a kwarg to
    # ImageBuildWorkflowData directly because of the flexmock of
    # ImageBuildWorkflowData in the fixture; otherwise
    # "TypeError: object.__new__() takes exactly one argument (the type to instantiate)"
    # is raised. The root cause is unknown so far.
    data.dockerfile_images = DockerfileImages(["scratch"])
    data.tag_conf.add_floating_image("registry/app:latest")
    data.plugins_results["plugin_a"] = {"var": "value"}
    data.save(ContextDir(Path(context_dir)))

    params = TaskParams(build_dir=str(build_dir),
                        config_file="config.yaml",
                        context_dir=str(context_dir),
                        namespace="test-namespace",
                        pipeline_run_name='test-pipeline-run',
                        user_params={})
    (flexmock(params)
     .should_receive("source")
     .and_return(dummy_source))

    task = plugin_based.PluginBasedTask(params)

    class _FakeDockerBuildWorkflow:
        def __init__(self, build_dir, data=None, **kwargs):
            self.data = data

        def build_docker_image(self):
            assert DockerfileImages(["scratch"]) == self.data.dockerfile_images

    (flexmock(plugin_based.inner)
     .should_receive("DockerBuildWorkflow")
     .replace_with(_FakeDockerBuildWorkflow))

    task.execute()
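The replace_with pattern used above is plain flexmock API: every call to the named attribute is rerouted to the replacement object until the mock is torn down (pytest does the teardown automatically after each test). A minimal self-contained sketch, with math.sqrt as an arbitrary stand-in target:

import math
from flexmock import flexmock, flexmock_teardown

def _fake_sqrt(x):
    return -1.0  # deliberately wrong, to prove the reroute happened

(flexmock(math)
 .should_receive("sqrt")
 .replace_with(_fake_sqrt))
assert math.sqrt(9) == -1.0

flexmock_teardown()  # in a pytest test this happens automatically
assert math.sqrt(9) == 3.0

Replacing the whole DockerBuildWorkflow class with _FakeDockerBuildWorkflow in the test above works the same way: the task constructs what it believes is the real workflow, and the fake's build_docker_image can then assert on the restored data.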