Example 1
    def task_with_mocked_deps(self, monkeypatch, context_dir, build_dir, dummy_source, tmpdir):
        """Create a PluginBasedTask instance with mocked task parameters.

        Mock DockerBuildWorkflow accordingly. Return the mocked workflow instance for further
        customization in individual tests.
        """
        params = TaskParams(
            build_dir=build_dir,
            config_file="config.yaml",
            context_dir=str(context_dir),
            namespace="test-namespace",
            pipeline_run_name='test-pipeline-run',
            user_params={"a": "b"},
        )

        source = dummy_source
        flexmock(params).should_receive("source").and_return(source)

        plugins = []
        monkeypatch.setattr(plugin_based.PluginBasedTask, "plugins_conf", plugins)

        build_root = RootBuildDir(build_dir)

        # Pin get_build_dir to a known object so the expectation below can
        # verify it is forwarded to the workflow constructor.
        flexmock(plugin_based.PluginBasedTask) \
            .should_receive("get_build_dir") \
            .and_return(build_root)

        # The tests in this case do not touch the workflow data. Because the
        # data object is a dataclass, flexmock can compare the instance created
        # during task execution with this fresh one by equality.
        workflow_data = ImageBuildWorkflowData()

        workflow_mock = flexmock(inner.DockerBuildWorkflow)
        expectation = workflow_mock.should_call("__init__").once()
        expectation.with_args(
            context_dir=ContextDir,
            build_dir=build_root,
            data=workflow_data,
            namespace="test-namespace",
            pipeline_run_name='test-pipeline-run',
            source=source,
            plugins_conf=plugins,
            user_params={"a": "b"},
            reactor_config_path="config.yaml",
            keep_plugins_running=False,
        )
        workflow_mock.should_receive("build_docker_image").and_raise(
            AssertionError("you must mock the build_docker_image() workflow method")
        )

        task = plugin_based.PluginBasedTask(params)
        return task, workflow_mock
Example 2
def binary_container_exit(task_args: dict):
    """Execute the exit steps of a binary container build.

    :param task_args: CLI arguments for a binary-container-exit task
    :return: whatever the task's run() returns
    """
    exit_task = BinaryExitTask(TaskParams.from_cli_args(task_args))
    return exit_task.run(init_build_dirs=True)
Example 3
def binary_container_postbuild(task_args: dict):
    """Execute the post-build steps of a binary container build.

    :param task_args: CLI arguments for a binary-container-postbuild task
    :return: whatever the task's run() returns
    """
    postbuild_task = BinaryPostBuildTask(TaskParams.from_cli_args(task_args))
    return postbuild_task.run(init_build_dirs=True)
Example 4
def binary_container_prebuild(task_args: dict):
    """Execute the pre-build steps of a binary container build.

    :param task_args: CLI arguments for a binary-container-prebuild task
    :return: whatever the task's run() returns
    """
    prebuild_task = BinaryPreBuildTask(TaskParams.from_cli_args(task_args))
    return prebuild_task.run()
Example 5
def clone(task_args: dict):
    """Clone the source to be built.

    :param task_args: CLI arguments for a clone task
    :return: whatever the task's run() returns
    """
    clone_task = CloneTask(TaskParams.from_cli_args(task_args))
    return clone_task.run()
Example 6
def test_ensure_workflow_data_is_saved_in_various_conditions(
        build_result, build_dir, dummy_source, tmpdir):
    """Verify workflow data is dumped to the context dir however the build ends.

    :param build_result: selects how the mocked build finishes; one of
        "normal_return", "error_raised" or "terminated"
    """
    context_dir = tmpdir.join("context_dir").mkdir()
    params = TaskParams(build_dir=str(build_dir),
                        config_file="config.yaml",
                        context_dir=str(context_dir),
                        namespace="test-namespace",
                        pipeline_run_name='test-pipeline-run',
                        user_params={})
    (flexmock(params).should_receive("source").and_return(dummy_source))

    task = plugin_based.PluginBasedTask(params)

    if build_result == "normal_return":
        (flexmock(plugin_based.inner.DockerBuildWorkflow).should_receive(
            "build_docker_image").once())

        task.execute()

    elif build_result == "error_raised":
        (flexmock(plugin_based.inner.DockerBuildWorkflow).should_receive(
            "build_docker_image").and_raise(BuildCanceledException))

        with pytest.raises(BuildCanceledException):
            task.execute()

    elif build_result == "terminated":
        # Start the task.execute in a separate process and terminate it.
        # This simulates the Cancel behavior by TERM signal.

        def _build_docker_image(self, *args, **kwargs):
            def _cancel_build(*args, **kwargs):
                raise BuildCanceledException()

            signal.signal(signal.SIGTERM, _cancel_build)
            # Whatever how long to sleep, just meaning it's running.
            time.sleep(5)

        (flexmock(plugin_based.inner.DockerBuildWorkflow).should_receive(
            "build_docker_image").replace_with(_build_docker_image))

        proc = multiprocessing.Process(target=task.execute)
        proc.start()

        # wait a short a while for the task.execute to run in the separate process.
        time.sleep(0.3)
        proc.terminate()
        # FIX: wait for the child to finish handling the cancellation. Without
        # the join, the assertions below could race the child process while it
        # is still writing workflow.json, making this test flaky.
        proc.join(timeout=10)

    assert context_dir.join("workflow.json").exists()

    wf_data = ImageBuildWorkflowData()
    wf_data.load_from_dir(ContextDir(Path(context_dir)))
    # As long as the data is loaded successfully, just check some
    # attributes to check the data.
    assert DockerfileImages() == wf_data.dockerfile_images
    assert {} == wf_data.prebuild_results
Example 7
def test_workflow_data_is_restored_before_starting_to_build(build_dir, dummy_source, tmpdir):
    """The task must reload previously saved workflow data before building."""
    context_dir = tmpdir.join("context_dir").mkdir()

    # Persist workflow data as if a previous task had saved it.
    saved_data = ImageBuildWorkflowData()
    # Note: for this test, dockerfile_images can't be passed as a kwarg to
    # the ImageBuildWorkflowData directly due to the flexmock of ImageBuildWorkflowData
    # in the fixture, otherwise
    # "TypeError: object.__new__() takes exactly one argument (the type to instantiate)"
    # will be raised. So far, have no idea why it happens.
    saved_data.dockerfile_images = DockerfileImages(["scratch"])
    saved_data.tag_conf.add_floating_image("registry/app:latest")
    saved_data.plugins_results["plugin_a"] = {"var": "value"}
    saved_data.save(ContextDir(Path(context_dir)))

    params = TaskParams(
        build_dir=str(build_dir),
        config_file="config.yaml",
        context_dir=str(context_dir),
        namespace="test-namespace",
        pipeline_run_name='test-pipeline-run',
        user_params={},
    )
    flexmock(params).should_receive("source").and_return(dummy_source)

    task = plugin_based.PluginBasedTask(params)

    class _FakeDockerBuildWorkflow:
        """Stands in for DockerBuildWorkflow and checks the restored data."""

        def __init__(self, build_dir, data=None, **kwargs):
            self.data = data

        def build_docker_image(self):
            assert DockerfileImages(["scratch"]) == self.data.dockerfile_images

    flexmock(plugin_based.inner) \
        .should_receive("DockerBuildWorkflow") \
        .replace_with(_FakeDockerBuildWorkflow)

    task.execute()