Example #1
    def test_get_platform_dir(self, platform, error, tmpdir):
        if error is None:
            dir_path = Path(tmpdir.join(platform))
            assert dir_path == ContextDir(Path(tmpdir)).get_platform_dir(platform)
            assert dir_path.exists()
        else:
            with error:
                ContextDir(Path(tmpdir)).get_platform_dir(platform)
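For orientation, here is the contract this test pins down, as a minimal sketch. ContextDirSketch and its attribute names are hypothetical stand-ins, not the project's implementation, and the validation behind the error branch is omitted:

from pathlib import Path

class ContextDirSketch:
    def __init__(self, path: Path):
        # Example #10 below shows the real ContextDir also ensures its
        # directory exists on construction.
        self._path = path
        self._path.mkdir(parents=True, exist_ok=True)

    def get_platform_dir(self, platform: str) -> Path:
        # Return <context_dir>/<platform>, creating the directory on first
        # access so the caller can use it immediately.
        platform_dir = self._path / platform
        platform_dir.mkdir(exist_ok=True)
        return platform_dir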
Example #2
    def test_save_and_load(self, tmpdir):
        """Test save workflow data and then load them back properly."""
        tag_conf = TagConf()
        tag_conf.add_floating_image(ImageName.parse("registry/image:latest"))
        tag_conf.add_primary_image(ImageName.parse("registry/image:1.0"))

        wf_data = ImageBuildWorkflowData(
            dockerfile_images=DockerfileImages(["scratch", "registry/f:35"]),
            # Test that an object nested in dict values is serialized
            tag_conf=tag_conf,
            plugins_results={
                "plugin_a": {
                    'parent-images-koji-builds': {
                        ImageName(repo='base', tag='latest').to_str(): {
                            'id': 123456789,
                            'nvr': 'base-image-1.0-99',
                            'state': 1,
                        },
                    },
                },
                "tag_and_push": [
                    # An object of this kind inside a list should be handled properly.
                    ImageName(registry="localhost:5000",
                              repo='image',
                              tag='latest'),
                ],
                "image_build": {
                    "logs": ["Build succeeds."]
                },
            },
            koji_upload_files=[
                {
                    "local_filename": "/path/to/build1.log",
                    "dest_filename": "x86_64-build.log",
                },
                {
                    "local_filename": "/path/to/dir1/remote-source.tar.gz",
                    "dest_filename": "remote-source.tar.gz",
                },
            ])

        context_dir = ContextDir(Path(tmpdir.join("context_dir").mkdir()))
        wf_data.save(context_dir)

        assert context_dir.workflow_json.exists()

        # Verify the saved data matches the schema
        saved_data = json.loads(context_dir.workflow_json.read_bytes())
        try:
            validate_with_schema(saved_data, "schemas/workflow_data.json")
        except osbs.exceptions.OsbsValidationException as e:
            pytest.fail(
                f"The dumped workflow data does not match JSON schema: {e}")

        # Load the data back and verify it
        loaded_wf_data = ImageBuildWorkflowData.load_from_dir(context_dir)

        assert wf_data.dockerfile_images == loaded_wf_data.dockerfile_images
        assert wf_data.tag_conf == loaded_wf_data.tag_conf
        assert wf_data.plugins_results == loaded_wf_data.plugins_results
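Condensed, the round trip under test is just the following; a sketch only, assuming ContextDir and ImageBuildWorkflowData are imported as in the test module:

from pathlib import Path
from tempfile import TemporaryDirectory

with TemporaryDirectory() as tmp:
    context_dir = ContextDir(Path(tmp) / "context_dir")
    data = ImageBuildWorkflowData()
    data.save(context_dir)  # serializes everything into workflow.json
    loaded = ImageBuildWorkflowData.load_from_dir(context_dir)
    assert data.plugins_results == loaded.plugins_results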
Example #3
    def test_load_from_empty_directory(self, tmpdir):
        context_dir = tmpdir.join("context_dir").mkdir()
        # Note: no data file (e.g. workflow.json) is created here.
        wf_data = ImageBuildWorkflowData.load_from_dir(ContextDir(Path(context_dir)))
        assert wf_data.dockerfile_images.is_empty
        assert wf_data.tag_conf.is_empty
        assert {} == wf_data.plugins_results
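The test pins down a useful contract: loading from a directory that has no workflow.json yields empty defaults rather than an error. A minimal sketch of that contract, with hypothetical names:

import json
from dataclasses import dataclass, field
from pathlib import Path

@dataclass
class WorkflowDataSketch:
    plugins_results: dict = field(default_factory=dict)

    @classmethod
    def load_from_dir(cls, root: Path) -> "WorkflowDataSketch":
        data_file = root / "workflow.json"
        if not data_file.exists():
            # Mirror the assertions above: empty defaults, no error.
            return cls()
        return cls(plugins_results=json.loads(data_file.read_text()))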
Example #4
def test_parse_dockerfile_again_after_data_is_loaded(build_dir, tmpdir):
    context_dir = ContextDir(Path(tmpdir.join("context_dir")))
    wf_data = ImageBuildWorkflowData.load_from_dir(context_dir)
    # Note that the source argument is None, which causes a DummySource to be
    # created and "FROM scratch" to be included in the Dockerfile.
    workflow = DockerBuildWorkflow(context_dir, build_dir, NAMESPACE,
                                   PIPELINE_RUN_NAME, wf_data)
    assert ["scratch"] == workflow.data.dockerfile_images.original_parents

    # Now, save the workflow data and load it again
    wf_data.save(context_dir)

    another_source = DummySource("git", "https://git.host/")
    dfp = DockerfileParser(another_source.source_path)
    dfp.content = 'FROM fedora:35\nCMD ["bash", "--version"]'

    wf_data = ImageBuildWorkflowData.load_from_dir(context_dir)
    flexmock(DockerBuildWorkflow).should_receive(
        "_parse_dockerfile_images").never()
    flexmock(wf_data.dockerfile_images).should_receive(
        "set_source_registry").never()
    workflow = DockerBuildWorkflow(context_dir,
                                   build_dir,
                                   NAMESPACE,
                                   PIPELINE_RUN_NAME,
                                   wf_data,
                                   source=another_source)
    assert ["scratch"] == workflow.data.dockerfile_images.original_parents, \
        "The dockerfile_images should not be changed."
Example #5
def workflow(context_dir, build_dir, dummy_source, user_params):
    return DockerBuildWorkflow(
        ContextDir(context_dir),
        RootBuildDir(build_dir),
        source=dummy_source,
        namespace='test-namespace',
        pipeline_run_name='test-pipeline-run',
    )
Example #6
def test_ensure_workflow_data_is_saved_in_various_conditions(
        build_result, build_dir, dummy_source, tmpdir):
    context_dir = tmpdir.join("context_dir").mkdir()
    params = TaskParams(build_dir=str(build_dir),
                        config_file="config.yaml",
                        context_dir=str(context_dir),
                        namespace="test-namespace",
                        pipeline_run_name='test-pipeline-run',
                        user_params={})
    (flexmock(params).should_receive("source").and_return(dummy_source))

    task = plugin_based.PluginBasedTask(params)

    if build_result == "normal_return":
        (flexmock(plugin_based.inner.DockerBuildWorkflow).should_receive(
            "build_docker_image").once())

        task.execute()

    elif build_result == "error_raised":
        (flexmock(plugin_based.inner.DockerBuildWorkflow).should_receive(
            "build_docker_image").and_raise(BuildCanceledException))

        with pytest.raises(BuildCanceledException):
            task.execute()

    elif build_result == "terminated":
        # Start task.execute in a separate process and terminate it,
        # simulating cancellation via the TERM signal.

        def _build_docker_image(self, *args, **kwargs):
            def _cancel_build(*args, **kwargs):
                raise BuildCanceledException()

            signal.signal(signal.SIGTERM, _cancel_build)
            # The sleep duration doesn't matter; it just keeps the build
            # "running" until the process is terminated.
            time.sleep(5)

        (flexmock(plugin_based.inner.DockerBuildWorkflow).should_receive(
            "build_docker_image").replace_with(_build_docker_image))

        proc = multiprocessing.Process(target=task.execute)
        proc.start()

        # Wait a short while for task.execute to start in the separate process.
        time.sleep(0.3)
        proc.terminate()

    assert context_dir.join("workflow.json").exists()

    wf_data = ImageBuildWorkflowData.load_from_dir(ContextDir(Path(context_dir)))
    # As long as the data loads successfully, just check a few attributes
    # to verify it.
    assert DockerfileImages() == wf_data.dockerfile_images
    assert {} == wf_data.plugins_results
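The "terminated" branch exercises a general pattern: install a SIGTERM handler that raises, so the task's normal error handling (which persists workflow.json) also runs when the process is killed. A standalone sketch of just that pattern, with hypothetical names:

import multiprocessing
import signal
import time

def _worker():
    def _cancel(signum, frame):
        raise RuntimeError("canceled")  # stand-in for BuildCanceledException

    signal.signal(signal.SIGTERM, _cancel)
    try:
        time.sleep(5)  # stands in for the long-running build
    except RuntimeError:
        pass  # the real task would save workflow data on this path

if __name__ == "__main__":
    proc = multiprocessing.Process(target=_worker)
    proc.start()
    time.sleep(0.3)   # give the child a moment to install its handler
    proc.terminate()  # delivers SIGTERM; the handler turns it into an exception
    proc.join()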
Example #7
    def test_load_invalid_data_from_directory(self, data_path, prop_name,
                                              wrong_value, tmpdir):
        """Test the workflow data is validated by JSON schema when reading from context_dir."""
        context_dir = ContextDir(Path(tmpdir.join("context_dir").mkdir()))

        data = ImageBuildWorkflowData(
            dockerfile_images=DockerfileImages(["scratch"]))
        data.tag_conf.add_floating_image("registry/httpd:2.4")
        data.plugins_results["plugin_1"] = "result"
        data.save(context_dir)

        saved_data = json.loads(context_dir.workflow_json.read_bytes())
        # Make data invalid
        graceful_chain_get(saved_data, *data_path,
                           make_copy=False)[prop_name] = wrong_value
        context_dir.workflow_json.write_text(json.dumps(saved_data),
                                             encoding="utf-8")

        with pytest.raises(osbs.exceptions.OsbsValidationException):
            ImageBuildWorkflowData.load_from_dir(context_dir)
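For context, graceful_chain_get (a helper from atomic_reactor.util) walks nested mappings by a chain of keys, and with make_copy=False it returns the live sub-dict, which is what lets the test corrupt the saved data in place. A rough re-implementation for illustration only; the real helper may differ in details:

import copy

def graceful_chain_get_sketch(data, *path, make_copy=True):
    # Follow the keys in order; any missing link yields None instead of raising.
    node = data
    for key in path:
        try:
            node = node[key]
        except (KeyError, IndexError, TypeError):
            return None
    return copy.deepcopy(node) if make_copy else node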
Example #8
def test_workflow_data_is_restored_before_starting_to_build(build_dir, dummy_source, tmpdir):
    context_dir = tmpdir.join("context_dir").mkdir()

    # Write workflow data as it was saved by a previous task
    data = ImageBuildWorkflowData()
    # Note: for this test, dockerfile_images can't be passed directly as a
    # kwarg to ImageBuildWorkflowData because of the flexmock of
    # ImageBuildWorkflowData in the fixture; doing so raises
    # "TypeError: object.__new__() takes exactly one argument (the type to
    # instantiate)". The cause is not yet understood.
    data.dockerfile_images = DockerfileImages(["scratch"])
    data.tag_conf.add_floating_image("registry/app:latest")
    data.plugins_results["plugin_a"] = {"var": "value"}
    data.save(ContextDir(Path(context_dir)))

    params = TaskParams(build_dir=str(build_dir),
                        config_file="config.yaml",
                        context_dir=str(context_dir),
                        namespace="test-namespace",
                        pipeline_run_name='test-pipeline-run',
                        user_params={})
    (flexmock(params)
     .should_receive("source")
     .and_return(dummy_source))

    task = plugin_based.PluginBasedTask(params)

    class _FakeDockerBuildWorkflow:
        def __init__(self, build_dir, data=None, **kwargs):
            self.data = data

        def build_docker_image(self):
            assert DockerfileImages(["scratch"]) == self.data.dockerfile_images

    (flexmock(plugin_based.inner)
     .should_receive("DockerBuildWorkflow")
     .replace_with(_FakeDockerBuildWorkflow))

    task.execute()
Example #9
    def test_get_workflow_json(self, tmpdir):
        expected = Path(tmpdir.join("workflow.json"))
        assert expected == ContextDir(Path(tmpdir)).workflow_json
        # ContextDir does not create workflow.json by itself.
        assert not expected.exists()
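Both assertions follow from a plain path-building property; a hypothetical sketch (class and attribute names invented), not the project's code:

from pathlib import Path

class WorkflowJsonSketch:
    def __init__(self, path: Path):
        self._path = path

    @property
    def workflow_json(self) -> Path:
        # Only builds the path; nothing touches the filesystem, so the file
        # does not exist until a caller (e.g. save()) writes it.
        return self._path / "workflow.json"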
Example #10
    def test_ensure_dir_exists(self, parent_exists, tmpdir):
        parent_path = tmpdir.join("mounted_volume")
        if parent_exists:
            parent_path.mkdir()
        context_dir = ContextDir(Path(parent_path.join("context_dir")))
        assert context_dir._path.exists()