Example #1
def test_build_result():
    with pytest.raises(AssertionError):
        BuildResult(fail_reason='it happens', image_id='spam')

    with pytest.raises(AssertionError):
        BuildResult(fail_reason='', image_id='spam')

    assert BuildResult(fail_reason='it happens').is_failed()
    assert not BuildResult(image_id='spam').is_failed()

    assert BuildResult(image_id='spam', logs=list('logs')).logs == list('logs')

    assert BuildResult(fail_reason='it happens').fail_reason == 'it happens'
    assert BuildResult(image_id='spam').image_id == 'spam'

    assert BuildResult(image_id='spam', annotations={
        'ham': 'mah'
    }).annotations == {
        'ham': 'mah'
    }

    assert BuildResult(image_id='spam', labels={
        'ham': 'mah'
    }).labels == {
        'ham': 'mah'
    }

    assert BuildResult(image_id='spam').is_image_available()
    assert not BuildResult(fail_reason='it happens').is_image_available()
    assert not BuildResult.make_remote_image_result().is_image_available()

    assert not BuildResult.make_remote_image_result().is_failed()
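
These assertions pin down the BuildResult contract: exactly one of fail_reason
or image_id must be given, an explicit fail_reason may not be empty, and a
remote-image result counts as neither failed nor locally available. Below is a
minimal sketch consistent with the test; BuildResultSketch is a hypothetical
reimplementation for illustration, not the real atomic_reactor class, which
carries additional state.

# Hypothetical minimal reimplementation implied by test_build_result:
class BuildResultSketch(object):
    REMOTE_IMAGE = object()  # sentinel: image exists only in a remote registry

    def __init__(self, logs=None, fail_reason=None, image_id=None,
                 annotations=None, labels=None):
        assert fail_reason is None or bool(fail_reason), \
            'fail_reason must be non-empty when given'
        assert not (fail_reason and image_id), \
            'fail_reason and image_id are mutually exclusive'
        assert fail_reason or image_id, \
            'either fail_reason or image_id is required'
        self.logs = logs or []
        self.fail_reason = fail_reason
        self.image_id = image_id
        self.annotations = annotations
        self.labels = labels

    @classmethod
    def make_remote_image_result(cls, annotations=None, labels=None):
        return cls(image_id=cls.REMOTE_IMAGE,
                   annotations=annotations, labels=labels)

    def is_failed(self):
        return self.fail_reason is not None

    def is_image_available(self):
        return (self.image_id is not None and
                self.image_id is not self.REMOTE_IMAGE)
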
    def run(self):
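        # Fan the build out to one worker cluster per platform, then
        # gather per-platform annotations, labels and failure reasons.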
        release = self.get_release()
        platforms = self.get_platforms()
        koji_upload_dir = self.get_koji_upload_dir()
        task_id = self.get_fs_task_id()

        thread_pool = ThreadPool(len(platforms))
        result = thread_pool.map_async(
            lambda cluster_info: self.do_worker_build(release, cluster_info,
                                                      koji_upload_dir, task_id),
            [self.choose_cluster(platform) for platform in platforms]
        )

        try:
            while not result.ready():
                # The wait call is a blocking call which prevents signals
                # from being processed. Wait for short intervals instead
                # of a single long interval, so build cancellation can
                # be handled virtually immediately.
                result.wait(1)
        # Always clean up worker builds on any error to avoid
        # runaway worker builds (includes orchestrator build cancellation)
        except Exception:
            self.log.info('build cancelled, cancelling worker builds')
            if self.worker_builds:
                ThreadPool(len(self.worker_builds)).map(
                    lambda bi: bi.cancel_build(), self.worker_builds)
            while not result.ready():
                result.wait(1)
            raise

        annotations = {'worker-builds': {
            build_info.platform: build_info.get_annotations()
            for build_info in self.worker_builds if build_info.build
        }}

        self._apply_repositories(annotations)

        labels = self._make_labels()

        fail_reasons = {
            build_info.platform: build_info.get_fail_reason()
            for build_info in self.worker_builds
            if not build_info.build or not build_info.build.is_succeeded()
        }

        self.workflow.plugin_workspace[self.key] = {
            WORKSPACE_KEY_UPLOAD_DIR: koji_upload_dir,
            WORKSPACE_KEY_BUILD_INFO: {build_info.platform: build_info
                                       for build_info in self.worker_builds},
        }

        if fail_reasons:
            return BuildResult(fail_reason=json.dumps(fail_reasons),
                               annotations=annotations, labels=labels)

        return BuildResult.make_remote_image_result(annotations, labels=labels)
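
The polling loop above is the crux: a single long AsyncResult.wait() would
block signal delivery, so the orchestrator waits in one-second slices to let a
cancellation raised in the main thread be handled almost immediately, cancels
every worker build, drains the pool, and re-raises. A standalone sketch of the
pattern follows; wait_interruptibly is a hypothetical helper name.

# Sketch of the interruptible-wait pattern used by run() above:
from multiprocessing.pool import ThreadPool

def wait_interruptibly(func, items, on_cancel):
    pool = ThreadPool(len(items))
    result = pool.map_async(func, items)
    try:
        # Short waits keep the main thread responsive to signals.
        while not result.ready():
            result.wait(1)
    except Exception:
        on_cancel()          # e.g. cancel all in-flight worker builds
        while not result.ready():
            result.wait(1)   # let already-running tasks wind down
        raise
    else:
        pool.close()
        pool.join()
    return result.get()
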
    def run(self):
        if not self.platforms:
            raise RuntimeError("No enabled platform to build on")
        self.set_build_image()

        thread_pool = ThreadPool(len(self.platforms))
        result = thread_pool.map_async(self.select_and_start_cluster,
                                       self.platforms)

        try:
            result.get()
        # Always clean up worker builds on any error to avoid
        # runaway worker builds (includes orchestrator build cancellation)
        except Exception:
            thread_pool.terminate()
            self.log.info('build cancelled, cancelling worker builds')
            if self.worker_builds:
                ThreadPool(len(self.worker_builds)).map(
                    lambda bi: bi.cancel_build(), self.worker_builds)
            while not result.ready():
                result.wait(1)
            raise
        else:
            thread_pool.close()
            thread_pool.join()

        annotations = {
            'worker-builds': {
                build_info.platform: build_info.get_annotations()
                for build_info in self.worker_builds if build_info.build
            }
        }

        self._apply_repositories(annotations)

        labels = self._make_labels()

        fail_reasons = {
            build_info.platform: build_info.get_fail_reason()
            for build_info in self.worker_builds
            if not build_info.build or not build_info.build.is_succeeded()
        }

        workspace = self.workflow.plugin_workspace.setdefault(self.key, {})
        workspace[WORKSPACE_KEY_UPLOAD_DIR] = self.koji_upload_dir
        workspace[WORKSPACE_KEY_BUILD_INFO] = {
            build_info.platform: build_info
            for build_info in self.worker_builds
        }

        if fail_reasons:
            return BuildResult(fail_reason=json.dumps(fail_reasons),
                               annotations=annotations,
                               labels=labels)

        return BuildResult.make_remote_image_result(annotations, labels=labels)
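
Compared with the first variant, this run() blocks in result.get() so worker
exceptions propagate directly, calls thread_pool.terminate() before cancelling
the worker builds, closes and joins the pool on the success path, and fills
the plugin workspace via setdefault() rather than overwriting the key outright.
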
def test_tag_parse(tmpdir, docker_tasker, floating_tags, unique_tags,
                   primary_tags, expected):
    df = df_parser(str(tmpdir))
    df.content = DF_CONTENT_LABELS

    workflow = mock_workflow(tmpdir)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)
    workflow.build_result = BuildResult.make_remote_image_result()

    base_inspect = {
        INSPECT_CONFIG: {
            'Labels': {
                'parentrelease': '7.4.1'
            },
            'Env': {
                'parentrelease': '7.4.1'
            },
        }
    }
    setattr(workflow.builder, 'base_image_inspect', base_inspect)
    mock_additional_tags_file(str(tmpdir), ['get_tags', 'file_tags'])

    if unique_tags is not None and primary_tags is not None and floating_tags is not None:
        input_tags = {
            'unique': unique_tags,
            'primary': primary_tags,
            'floating': floating_tags,
        }
    else:
        input_tags = None
    runner = PostBuildPluginsRunner(docker_tasker, workflow,
                                    [{
                                        'name': TagFromConfigPlugin.key,
                                        'args': {
                                            'tag_suffixes': input_tags
                                        }
                                    }])
    if expected is not None:
        results = runner.run()
        plugin_result = results[TagFromConfigPlugin.key]

        # Plugin should return the tags we expect
        assert plugin_result == expected

        # Workflow should have the expected tags configured
        for tag in expected:
            assert any(tag == str(image) for image in workflow.tag_conf.images)

        # Workflow should not have any other tags configured
        assert len(workflow.tag_conf.images) == len(expected)
    else:
        with pytest.raises(PluginFailedException):
            runner.run()
def test_tags_enclosed(tmpdir, docker_tasker, name, organization, expected):
    df = df_parser(str(tmpdir))
    df.content = dedent("""\
        FROM fedora
        LABEL "name"="{}"
        LABEL "version"="1.7"
        LABEL "release"="99"
    """.format(name))

    workflow = mock_workflow(tmpdir)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)
    workflow.build_result = BuildResult.make_remote_image_result()

    if organization:
        reactor_config = ReactorConfig({
            'version': 1,
            'registries_organization': organization
        })
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: reactor_config
        }

    input_tags = {
        'unique': ['foo', 'bar'],
        'primary': ['{version}', '{version}-{release}'],
    }

    runner = PostBuildPluginsRunner(docker_tasker, workflow,
                                    [{
                                        'name': TagFromConfigPlugin.key,
                                        'args': {
                                            'tag_suffixes': input_tags
                                        }
                                    }])

    results = runner.run()
    plugin_result = results[TagFromConfigPlugin.key]

    expected_tags = [
        '{}:{}'.format(expected, tag)
        for tag in ['foo', 'bar', '1.7', '1.7-99']
    ]
    # Plugin should return the tags we expect
    assert plugin_result == expected_tags

    # Workflow should have the expected tags configured
    for tag in expected_tags:
        assert any(tag == str(image) for image in workflow.tag_conf.images)

    # Workflow should not have any other tags configured
    assert len(workflow.tag_conf.images) == len(expected_tags)
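
The primary tags here are templates: '{version}' and '{version}-{release}' are
filled in from the Dockerfile LABELs before tagging. The substitution is
equivalent to plain str.format over the label values:

# Illustration only: how '1.7' and '1.7-99' arise from the LABELs above.
labels = {'version': '1.7', 'release': '99'}
assert '{version}'.format(**labels) == '1.7'
assert '{version}-{release}'.format(**labels) == '1.7-99'
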
    def run(self):
        if not self.platforms:
            raise RuntimeError("No enabled platform to build on")
        self.set_build_image()

        thread_pool = ThreadPool(len(self.platforms))
        result = thread_pool.map_async(self.select_and_start_cluster, self.platforms)

        try:
            result.get()
        # Always clean up worker builds on any error to avoid
        # runaway worker builds (includes orchestrator build cancellation)
        except Exception:
            thread_pool.terminate()
            self.log.info('build cancelled, cancelling worker builds')
            if self.worker_builds:
                ThreadPool(len(self.worker_builds)).map(
                    lambda bi: bi.cancel_build(), self.worker_builds)
            while not result.ready():
                result.wait(1)
            raise
        else:
            thread_pool.close()
            thread_pool.join()

        annotations = {'worker-builds': {
            build_info.platform: build_info.get_annotations()
            for build_info in self.worker_builds if build_info.build
        }}

        self._apply_repositories(annotations)

        labels = self._make_labels()

        fail_reasons = {
            build_info.platform: build_info.get_fail_reason()
            for build_info in self.worker_builds
            if not build_info.build or not build_info.build.is_succeeded()
        }

        workspace = self.workflow.plugin_workspace.setdefault(self.key, {})
        workspace[WORKSPACE_KEY_UPLOAD_DIR] = self.koji_upload_dir
        workspace[WORKSPACE_KEY_BUILD_INFO] = {build_info.platform: build_info
                                               for build_info in self.worker_builds}

        if fail_reasons:
            return BuildResult(fail_reason=json.dumps(fail_reasons),
                               annotations=annotations, labels=labels)

        return BuildResult.make_remote_image_result(annotations, labels=labels)
Example #7
def test_build_result():
    with pytest.raises(AssertionError):
        BuildResult(fail_reason='it happens', image_id='spam')

    with pytest.raises(AssertionError):
        BuildResult(fail_reason='', image_id='spam')

    assert BuildResult(fail_reason='it happens').is_failed()
    assert not BuildResult(image_id='spam').is_failed()

    assert BuildResult(image_id='spam', logs=list('logs')).logs == list('logs')

    assert BuildResult(fail_reason='it happens').fail_reason == 'it happens'
    assert BuildResult(image_id='spam').image_id == 'spam'

    assert BuildResult(image_id='spam', annotations={'ham': 'mah'}).annotations == {'ham': 'mah'}

    assert BuildResult(image_id='spam', labels={'ham': 'mah'}).labels == {'ham': 'mah'}

    assert BuildResult(image_id='spam').is_image_available()
    assert not BuildResult(fail_reason='it happens').is_image_available()
    assert not BuildResult.make_remote_image_result().is_image_available()

    assert not BuildResult.make_remote_image_result().is_failed()
def test_tag_parse(tmpdir, docker_tasker, floating_tags, unique_tags, primary_tags, expected):
    df = df_parser(str(tmpdir))
    df.content = DF_CONTENT_LABELS

    workflow = mock_workflow(tmpdir)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)
    workflow.build_result = BuildResult.make_remote_image_result()

    base_inspect = {
        INSPECT_CONFIG: {
            'Labels': {'parentrelease': '7.4.1'},
            'Env': {'parentrelease': '7.4.1'},
        }
    }
    setattr(workflow.builder, 'base_image_inspect', base_inspect)
    mock_additional_tags_file(str(tmpdir), ['get_tags', 'file_tags'])

    if unique_tags is not None and primary_tags is not None and floating_tags is not None:
        input_tags = {
            'unique': unique_tags,
            'primary': primary_tags,
            'floating': floating_tags,
        }
    else:
        input_tags = None
    runner = PostBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{'name': TagFromConfigPlugin.key,
          'args': {'tag_suffixes': input_tags}}]
    )
    if expected is not None:
        results = runner.run()
        plugin_result = results[TagFromConfigPlugin.key]

        # Plugin should return the tags we expect
        assert plugin_result == expected

        # Workflow should have the expected tags configured
        for tag in expected:
            assert any(tag == str(image) for image in workflow.tag_conf.images)

        # Workflow should not have any other tags configured
        assert len(workflow.tag_conf.images) == len(expected)
    else:
        with pytest.raises(PluginFailedException):
            runner.run()
def test_tags_enclosed(tmpdir, docker_tasker, name, organization, expected):
    df = df_parser(str(tmpdir))
    df.content = dedent("""\
        FROM fedora
        LABEL "name"="{}"
        LABEL "version"="1.7"
        LABEL "release"="99"
    """.format(name))

    workflow = mock_workflow(tmpdir)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)
    workflow.build_result = BuildResult.make_remote_image_result()

    if organization:
        reactor_config = ReactorConfig({
            'version': 1,
            'registries_organization': organization
        })
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {WORKSPACE_CONF_KEY: reactor_config}

    input_tags = {
        'unique': ['foo', 'bar'],
        'primary': ['{version}', '{version}-{release}'],
    }

    runner = PostBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{'name': TagFromConfigPlugin.key,
          'args': {'tag_suffixes': input_tags}}]
    )

    results = runner.run()
    plugin_result = results[TagFromConfigPlugin.key]

    expected_tags = ['{}:{}'.format(expected, tag) for tag in ['foo', 'bar', '1.7', '1.7-99']]
    # Plugin should return the tags we expect
    assert plugin_result == expected_tags

    # Workflow should have the expected tags configured
    for tag in expected_tags:
        assert any(tag == str(image) for image in workflow.tag_conf.images)

    # Workflow should not have any other tags configured
    assert len(workflow.tag_conf.images) == len(expected_tags)
Example #10
def test_arrangementv4_repositories(tmpdir, group_manifests, restore,
                                    reactor_config_map):
    workflow = prepare(reactor_config_map=reactor_config_map)
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(None, workflow,
                               [{
                                   'name': StoreMetadataInOSv3Plugin.key,
                                   "args": {
                                       "url": "http://example.com/"
                                   }
                               }])

    worker_data = {
        'repositories': {
            'primary': ['worker:1'],
            'unique': ['worker:unique'],
        },
    }
    workflow.buildstep_result[OrchestrateBuildPlugin.key] = worker_data
    workflow.build_result = BuildResult.make_remote_image_result(
        annotations=worker_data)
    if group_manifests is not None:
        workflow.postbuild_results[
            PLUGIN_GROUP_MANIFESTS_KEY] = group_manifests
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    repositories = json.loads(annotations['repositories'])
    if restore:
        assert repositories != worker_data['repositories']
    else:
        assert repositories == worker_data['repositories']
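
Note the round-trip in the assertion: StoreMetadataInOSv3Plugin emits
'repositories' as a JSON-encoded string inside the annotations, so the test
json.loads() it before comparing against worker_data['repositories'].
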
def test_arrangementv4_repositories(tmpdir, group_manifests, restore):
    workflow = prepare()
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(
        None,
        workflow,
        [{
            'name': StoreMetadataInOSv3Plugin.key,
            "args": {
                "url": "http://example.com/"
            }
        }]
    )

    worker_data = {
        'repositories': {
            'primary': ['worker:1'],
            'unique': ['worker:unique'],
        },
    }
    workflow.buildstep_result[OrchestrateBuildPlugin.key] = worker_data
    workflow.build_result = BuildResult.make_remote_image_result(annotations=worker_data)
    if group_manifests is not None:
        workflow.postbuild_results[PLUGIN_GROUP_MANIFESTS_KEY] = group_manifests
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    repositories = json.loads(annotations['repositories'])
    if restore:
        assert repositories != worker_data['repositories']
    else:
        assert repositories == worker_data['repositories']
def test_tag_parse(tmpdir, docker_tasker, unique_tags, primary_tags, expected):
    df = df_parser(str(tmpdir))
    df.content = DF_CONTENT_LABELS

    workflow = mock_workflow(tmpdir)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)
    workflow.build_result = BuildResult.make_remote_image_result()

    flexmock(workflow,
             base_image_inspect={
                 INSPECT_CONFIG: {
                     'Labels': {
                         'parentrelease': '7.4.1'
                     },
                     'Env': {
                         'parentrelease': '7.4.1'
                     },
                 }
             })
    mock_additional_tags_file(str(tmpdir), ['get_tags', 'file_tags'])

    if unique_tags is not None and primary_tags is not None:
        input_tags = {'unique': unique_tags, 'primary': primary_tags}
    else:
        input_tags = None
    runner = PostBuildPluginsRunner(docker_tasker, workflow,
                                    [{
                                        'name': TagFromConfigPlugin.key,
                                        'args': {
                                            'tag_suffixes': input_tags
                                        }
                                    }])
    if expected is not None:
        results = runner.run()
        plugin_result = results[TagFromConfigPlugin.key]
        assert plugin_result == expected
    else:
        with pytest.raises(PluginFailedException):
            runner.run()
def test_tag_parse(tmpdir, docker_tasker, unique_tags, primary_tags, expected):
    df = df_parser(str(tmpdir))
    df.content = DF_CONTENT_LABELS

    workflow = mock_workflow(tmpdir)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)
    workflow.build_result = BuildResult.make_remote_image_result()

    flexmock(workflow, base_image_inspect={
        INSPECT_CONFIG: {
            'Labels': {'parentrelease': '7.4.1'},
            'Env': {'parentrelease': '7.4.1'},
        }
    })
    mock_additional_tags_file(str(tmpdir), ['get_tags', 'file_tags'])

    if unique_tags is not None and primary_tags is not None:
        input_tags = {
            'unique': unique_tags,
            'primary': primary_tags
        }
    else:
        input_tags = None
    runner = PostBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{'name': TagFromConfigPlugin.key,
          'args': {'tag_suffixes': input_tags}}]
    )
    if expected is not None:
        results = runner.run()
        plugin_result = results[TagFromConfigPlugin.key]
        assert plugin_result == expected
    else:
        with pytest.raises(PluginFailedException):
            runner.run()
Example #14
import inspect
import json
import signal
import threading

from time import sleep

from atomic_reactor.build import BuildResult  # import path for BuildResult assumed
from atomic_reactor.inner import BuildResults, BuildResultsEncoder, BuildResultsJSONDecoder
from atomic_reactor.inner import DockerBuildWorkflow

BUILD_RESULTS_ATTRS = [
    'build_logs', 'built_img_inspect', 'built_img_info', 'base_img_info',
    'base_plugins_output', 'built_img_plugins_output'
]
DUMMY_BUILD_RESULT = BuildResult(image_id="image_id")
DUMMY_FAILED_BUILD_RESULT = BuildResult(fail_reason='it happens')
DUMMY_REMOTE_BUILD_RESULT = BuildResult.make_remote_image_result()


def test_build_results_encoder():
    results = BuildResults()
    expected_data = {}
    for attr in BUILD_RESULTS_ATTRS:
        setattr(results, attr, attr)
        expected_data[attr] = attr

    data = json.loads(json.dumps(results, cls=BuildResultsEncoder))
    assert data == expected_data
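
The encoder test leans on the json module's extension hook: a json.JSONEncoder
subclass overrides default() to turn otherwise unserializable objects into
plain dicts, which json.loads() then yields back as the expected_data mapping.
A generic sketch of the mechanism; AttrEncoder is a hypothetical stand-in, not
the real BuildResultsEncoder.

# Generic sketch; the real BuildResultsEncoder may serialize differently.
class AttrEncoder(json.JSONEncoder):
    def default(self, obj):
        try:
            return vars(obj)  # expose instance attributes as a dict
        except TypeError:
            return json.JSONEncoder.default(self, obj)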


def test_build_results_decoder():
    data = {}
import json

from atomic_reactor.build import BuildResult  # import path for BuildResult assumed
from atomic_reactor.inner import BuildResults, BuildResultsEncoder, BuildResultsJSONDecoder
from atomic_reactor.inner import DockerBuildWorkflow
from atomic_reactor.inner import FSWatcher
from atomic_reactor.constants import INSPECT_ROOTFS, INSPECT_ROOTFS_LAYERS


BUILD_RESULTS_ATTRS = ['build_logs',
                       'built_img_inspect',
                       'built_img_info',
                       'base_img_info',
                       'base_plugins_output',
                       'built_img_plugins_output']
DUMMY_BUILD_RESULT = BuildResult(image_id="image_id")
DUMMY_FAILED_BUILD_RESULT = BuildResult(fail_reason='it happens')
DUMMY_REMOTE_BUILD_RESULT = BuildResult.make_remote_image_result()


def test_build_results_encoder():
    results = BuildResults()
    expected_data = {}
    for attr in BUILD_RESULTS_ATTRS:
        setattr(results, attr, attr)
        expected_data[attr] = attr

    data = json.loads(json.dumps(results, cls=BuildResultsEncoder))
    assert data == expected_data


def test_build_results_decoder():
    data = {}