def run(self):
        """
        Run the plugin: write the workflow's yum repo files into the build
        context directory and update the Dockerfile to ADD and use them.
        """
        # consider only files that are yum repo definitions
        yum_repos = {k: v for k, v in self.workflow.files.items() if k.startswith(YUM_REPOS_DIR)}
        if not yum_repos:
            return
        # absolute path in containers -> relative path within context
        host_repos_path = os.path.join(self.workflow.builder.df_dir, RELATIVE_REPOS_PATH)
        self.log.info("creating directory for yum repos: %s", host_repos_path)
        os.mkdir(host_repos_path)

        # write only the filtered repo files; the previous code iterated all
        # of self.workflow.files, which would also write non-repo files here
        for repo, repo_content in yum_repos.items():
            yum_repo = YumRepo(repourl=repo, content=repo_content, dst_repos_dir=host_repos_path,
                               add_hash=False)
            yum_repo.write_content()

        # Find out the USER inherited from the base image
        inspect = self.workflow.builder.base_image_inspect
        inherited_user = ''
        if not self.workflow.builder.base_from_scratch:
            # default INSPECT_CONFIG to {} so a missing config section does
            # not raise AttributeError on the chained .get()
            inherited_user = inspect.get(INSPECT_CONFIG, {}).get('User', '')
        df = df_parser(self.workflow.builder.df_path, workflow=self.workflow)
        add_yum_repos_to_dockerfile(list(yum_repos), df, inherited_user,
                                    self.workflow.builder.base_from_scratch)
def test_metadata_plugin_rpmqa_failure(tmpdir):
    """rpm-packages annotation must be empty when the rpmqa plugin failed."""
    started_at = datetime.now()
    workflow = prepare()

    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = "\nFROM fedora\nRUN yum install -y python-django\nCMD blabla"
    workflow.builder = X
    workflow.builder.df_path = dockerfile.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    rpmqa_key = PostBuildRPMqaPlugin.key
    workflow.prebuild_results = {}
    workflow.postbuild_results = {rpmqa_key: RuntimeError()}
    workflow.plugins_timestamps = {
        rpmqa_key: (started_at + timedelta(seconds=3)).isoformat(),
    }
    workflow.plugins_durations = {rpmqa_key: 3.03}
    workflow.plugins_errors = {rpmqa_key: 'foo'}

    plugin_conf = [{'name': StoreMetadataInOSv3Plugin.key,
                    'args': {'url': 'http://example.com/'}}]
    output = ExitPluginsRunner(None, workflow, plugin_conf).run()

    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    for key in ("dockerfile", "logs", "rpm-packages", "repositories",
                "commit_id", "base-image-id", "base-image-name", "image-id"):
        assert key in annotations

    # On rpmqa failure, rpm-packages should be empty
    assert len(annotations["rpm-packages"]) == 0

    assert "plugins-metadata" in annotations
    for key in ("errors", "durations", "timestamps"):
        assert key in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["errors"]
    assert "all_rpm_packages" in plugins_metadata["durations"]
    def run(self):
        """
        Run the plugin: write the workflow's yum repo files into the build
        context directory and add them to the Dockerfile.
        """
        yum_repos = {
            k: v
            for k, v in self.workflow.files.items()
            if k.startswith(YUM_REPOS_DIR)
        }
        if not yum_repos:
            return
        # absolute path in containers -> relative path within context
        repos_host_cont_mapping = {}
        host_repos_path = os.path.join(self.workflow.builder.df_dir,
                                       RELATIVE_REPOS_PATH)
        self.log.info("creating directory for yum repos: %s", host_repos_path)
        os.mkdir(host_repos_path)

        # write only the filtered repo files; iterating self.workflow.files
        # here (as before) could also treat non-repo files as repos
        for repo, repo_content in yum_repos.items():
            yum_repo = YumRepo(repourl=repo,
                               content=repo_content,
                               dst_repos_dir=host_repos_path)
            repos_host_cont_mapping[repo] = yum_repo.write_and_return_content()

        # Find out the USER inherited from the base image
        inspect = self.workflow.builder.base_image_inspect
        inherited_user = inspect[INSPECT_CONFIG].get('User', '')
        df = df_parser(self.workflow.builder.df_path, workflow=self.workflow)
        add_yum_repos_to_dockerfile(repos_host_cont_mapping, df,
                                    inherited_user)
def test_missing_koji_build_id(tmpdir, reactor_config_map):  # noqa
    """Without a koji plugin result, no koji-build-id label is stored."""
    workflow = prepare(reactor_config_map=reactor_config_map)
    workflow.exit_results = {}

    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = "\nFROM fedora\nRUN yum install -y python-django\nCMD blabla"
    workflow.builder.df_path = dockerfile.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    plugin_conf = [{'name': StoreMetadataInOSv3Plugin.key,
                    'args': {'url': 'http://example.com/'}}]
    output = ExitPluginsRunner(None, workflow, plugin_conf).run()

    assert StoreMetadataInOSv3Plugin.key in output
    assert "koji-build-id" not in output[StoreMetadataInOSv3Plugin.key]["labels"]
def test_store_metadata_fail_update_annotations(tmpdir, caplog, reactor_config_map):  # noqa
    """When OSBS fails to update annotations the plugin fails and logs them."""
    workflow = prepare(reactor_config_map=reactor_config_map)
    workflow.exit_results = {}

    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = "\nFROM fedora\nRUN yum install -y python-django\nCMD blabla"
    workflow.builder.df_path = dockerfile.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(None, workflow,
                               [{'name': StoreMetadataInOSv3Plugin.key,
                                 'args': {'url': 'http://example.com/'}}])
    (flexmock(OSBS)
        .should_receive('update_annotations_on_build')
        .and_raise(OsbsResponseException('/', 'failed', 0)))

    with pytest.raises(PluginFailedException):
        runner.run()
    assert 'annotations:' in caplog.text
def test_metadata_plugin_rpmqa_failure(tmpdir, reactor_config_map):  # noqa
    """rpmqa and koji-upload failures are both recorded in plugins-metadata."""
    started_at = datetime.now()
    workflow = prepare(reactor_config_map=reactor_config_map)

    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = "\nFROM fedora\nRUN yum install -y python-django\nCMD blabla"
    workflow.builder.df_path = dockerfile.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    rpmqa_key = PostBuildRPMqaPlugin.key
    koji_key = PLUGIN_KOJI_UPLOAD_PLUGIN_KEY
    ts = (started_at + timedelta(seconds=3)).isoformat()

    workflow.prebuild_results = {}
    workflow.postbuild_results = {
        rpmqa_key: RuntimeError(),
        koji_key: {'metadata_fragment_key': 'metadata.json',
                   'metadata_fragment': 'configmap/build-1-md'},
    }
    workflow.plugins_timestamps = {rpmqa_key: ts, koji_key: ts}
    workflow.plugins_durations = {rpmqa_key: 3.03, koji_key: 3.03}
    workflow.plugins_errors = {rpmqa_key: 'foo', koji_key: 'bar'}

    plugin_conf = [{'name': StoreMetadataInOSv3Plugin.key,
                    'args': {'url': 'http://example.com/'}}]
    output = ExitPluginsRunner(None, workflow, plugin_conf).run()

    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    for key in ("dockerfile", "repositories", "commit_id", "base-image-id",
                "base-image-name", "image-id", "metadata_fragment",
                "metadata_fragment_key", "plugins-metadata"):
        assert key in annotations
    for key in ("errors", "durations", "timestamps"):
        assert key in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["errors"]
    assert "all_rpm_packages" in plugins_metadata["durations"]
def test_labels_metadata_plugin(tmpdir, koji_plugin, reactor_config_map):
    """The koji build id from exit results lands in labels as a string."""
    koji_build_id = 1234
    workflow = prepare(reactor_config_map=reactor_config_map)

    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = "\nFROM fedora\nRUN yum install -y python-django\nCMD blabla"
    workflow.builder.df_path = dockerfile.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    workflow.exit_results = {koji_plugin: koji_build_id}

    plugin_conf = [{'name': StoreMetadataInOSv3Plugin.key,
                    'args': {'url': 'http://example.com/'}}]
    output = ExitPluginsRunner(None, workflow, plugin_conf).run()

    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" in labels
    assert is_string_type(labels["koji-build-id"])
    assert int(labels["koji-build-id"]) == koji_build_id
def test_yuminject_plugin_notwrapped(tmpdir):
    """Repo ADD and cleanup lines are injected without wrapping commands."""
    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = "FROM fedora\nRUN yum install -y python-django\nCMD blabla"

    tasker, workflow = prepare(dockerfile.dockerfile_path)

    metalink = ('https://mirrors.fedoraproject.org/metalink'
                '?repo=fedora-$releasever&arch=$basearch')
    repo_key = os.path.join(YUM_REPOS_DIR, DEFAULT_YUM_REPOFILE_NAME)
    workflow.files[repo_key] = render_yum_repo(OrderedDict((
        ('name', 'my-repo'),
        ('metalink', metalink),
        ('enabled', 1),
        ('gpgcheck', 0),
    )))

    runner = PreBuildPluginsRunner(tasker, workflow, [{
        'name': InjectYumRepoPlugin.key,
        'args': {'wrap_commands': False},
    }])
    runner.run()
    assert InjectYumRepoPlugin.key is not None

    expected_output = (
        "FROM fedora\n"
        "ADD atomic-reactor-repos/* '/etc/yum.repos.d/'\n"
        "RUN yum install -y python-django\n"
        "CMD blabla\n"
        "RUN rm -f '/etc/yum.repos.d/atomic-reactor-injected.repo'\n"
    )
    assert expected_output == dockerfile.content
    def __init__(self, tasker, workflow, nvr=None, destdir="/root/buildinfo/",
                 use_final_dockerfile=False):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param nvr: name-version-release, appended to "Dockerfile-"; when
                    omitted it is derived from the Name/Version/Release labels
        :param destdir: directory in the image to put Dockerfile-N-V-R into
        :param use_final_dockerfile: bool; True uses the final processed
                                     dockerfile, False uses the Dockerfile as
                                     it was when this plugin ran
        """
        super(AddDockerfilePlugin, self).__init__(tasker, workflow)

        self.use_final_dockerfile = use_final_dockerfile

        if nvr is None:
            # derive NVR from the Dockerfile labels
            labels = df_parser(self.workflow.builder.df_path).labels
            parts = [get_preferred_label(labels, key)
                     for key in ('name', 'version', 'release')]
            if any(part is None for part in parts):
                raise ValueError("You have to specify either nvr arg or name/version/release labels.")
            nvr = "{0}-{1}-{2}".format(*parts).replace("/", "-")
        self.df_name = '{0}-{1}'.format(DOCKERFILE_FILENAME, nvr)
        self.df_dir = destdir
        self.df_path = os.path.join(self.df_dir, self.df_name)

        # not using the final dockerfile: copy the current snapshot into context
        if not self.use_final_dockerfile:
            local_df_path = os.path.join(self.workflow.builder.df_dir, self.df_name)
            shutil.copy2(self.workflow.builder.df_path, local_df_path)
def test_update_base_image(organization, tmpdir, reactor_config_map, docker_tasker):
    """The FROM image is rewritten to the resolved base image digest."""
    df_template = dedent("""\
        FROM {}
        LABEL horses=coconuts
        CMD whoah
    """)
    dfp = df_parser(str(tmpdir))
    image_str = "base:image"
    dfp.content = df_template.format(image_str)
    base_str = "base@sha256:1234"
    base_image_name = ImageName.parse("base@sha256:1234")

    enclosed_parent = ImageName.parse(image_str)
    if organization and reactor_config_map:
        enclosed_parent.enclose(organization)

    workflow = mock_workflow()
    workflow.builder.set_df_path(dfp.dockerfile_path)
    workflow.builder.parent_images = {enclosed_parent: base_image_name}
    workflow.builder.base_image = base_image_name
    workflow.builder.set_parent_inspection_data(base_str, dict(Id=base_str))
    workflow.builder.tasker.inspect_image = lambda *_: dict(Id=base_str)

    run_plugin(workflow, reactor_config_map, docker_tasker, organization=organization)
    assert dfp.content == df_template.format(base_str)
def test_multistage_dockerfiles(name, inherited_user, dockerfile, expect_cleanup_lines,
                                base_from_scratch, tmpdir):
    """
    Run InjectYumRepoPlugin on a multistage Dockerfile and verify that repo
    ADD instructions appear at each "### ADD HERE" marker and that the
    cleanup lines are appended at the end.
    """
    # expect repo ADD instructions where indicated in the content, and RUN rm at the end.
    # begin by splitting on "### ADD HERE" so we know where to expect changes.
    segments = re.split(r'^.*ADD HERE.*$\n?', dockerfile, flags=re.M)
    segment_lines = [seg.splitlines(True) for seg in segments]

    # build expected contents by manually inserting expected ADD lines between the segments
    # (no ADD is expected after the final segment, hence [:-1])
    for lines in segment_lines[:-1]:
        lines.append("ADD %s* '/etc/yum.repos.d/'\n" % RELATIVE_REPOS_PATH)
    expected_lines = list(itertools.chain.from_iterable(segment_lines))  # flatten lines

    # now run the plugin to transform the given dockerfile
    df = df_parser(str(tmpdir))
    df.content = ''.join(segments)  # dockerfile without the "### ADD HERE" lines
    tasker, workflow = prepare(df.dockerfile_path, inherited_user)
    workflow.builder.set_base_from_scratch(base_from_scratch)
    repo_file = 'myrepo.repo'
    repo_path = os.path.join(YUM_REPOS_DIR, repo_file)
    workflow.files[repo_path] = repocontent
    runner = PreBuildPluginsRunner(tasker, workflow, [{
            'name': InjectYumRepoPlugin.key,
            'args': {}}])
    runner.run()

    # assert the Dockerfile has changed as expected up to the cleanup lines
    new_df = df.lines
    assert new_df[:len(expected_lines)] == expected_lines

    # the rest of the lines should be cleanup lines
    cleanup_lines = new_df[len(expected_lines):]
    assert remove_lines_match(cleanup_lines, expect_cleanup_lines, [repo_file])
def test_add_labels_plugin(tmpdir, docker_tasker,
                           df_content, labels_conf_base, labels_conf, dont_overwrite, aliases,
                           expected_output, caplog):
    """
    Run AddLabelsPlugin and check either the logged exception (when
    expected_output is a RuntimeError) or the resulting Dockerfile content.
    """
    df = df_parser(str(tmpdir))
    df.content = df_content

    if MOCK:
        mock_docker()

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    setattr(workflow, 'builder', X)
    flexmock(workflow, base_image_inspect=labels_conf_base)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': AddLabelsPlugin.key,
            'args': {
                'labels': labels_conf,
                'dont_overwrite': dont_overwrite,
                'auto_labels': [],
                'aliases': aliases,
            }
        }]
    )

    runner.run()
    if isinstance(expected_output, RuntimeError):
        # pytest's caplog.text is a str property, not a callable; the old
        # caplog.text() raised TypeError before the assertion could run
        assert "plugin 'add_labels_in_dockerfile' raised an exception: RuntimeError" in caplog.text

    else:
        assert AddLabelsPlugin.key is not None
        # expected_output presumably holds acceptable dockerfile content(s)
        assert df.content in expected_output
def test_filter_nonpulp_repositories(tmpdir, pulp_registries, docker_registries,
                                     is_orchestrator, expected, reactor_config_map):
    """Only the expected repositories survive filtering in annotations."""
    workflow = prepare(pulp_registries=pulp_registries,
                       docker_registries=docker_registries,
                       reactor_config_map=reactor_config_map)
    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = "\nFROM fedora\nRUN yum install -y python-django\nCMD blabla"
    workflow.builder.df_path = dockerfile.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(None, workflow,
                               [{'name': StoreMetadataInOSv3Plugin.key,
                                 'args': {'url': 'http://example.com/'}}])
    if is_orchestrator:
        workflow.buildstep_result[OrchestrateBuildPlugin.key] = 'foo'
    output = runner.run()

    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert json.loads(annotations['repositories']) == expected
def test_copy_from_is_blocked(tmpdir):
    """test when user has specified COPY --from=image (instead of builder)"""
    dfp = df_parser(str(tmpdir))
    if MOCK:
        mock_docker()
    source = {'provider': 'path', 'uri': 'file://' + str(tmpdir), 'tmpdir': str(tmpdir)}

    def build():
        # InsideBuilder calls set_df_path at init, which validates COPY --from
        return InsideBuilder(get_source_instance_for(source), 'built-img')

    # valid references to a named stage and a stage index must not raise
    dfp.content = dedent("""\
        FROM monty AS vikings
        FROM python
        COPY --from=vikings /spam/eggs /bin/eggs
        COPY --from=0 /spam/eggs /bin/eggs
        COPY src dest
    """)
    build()

    dfp.content = dedent("""\
        FROM monty as vikings
        FROM python
        # using a stage name we haven't seen should break:
        COPY --from=notvikings /spam/eggs /bin/eggs
    """)
    with pytest.raises(RuntimeError) as exc_info:
        build()
    assert "FROM notvikings AS source" in str(exc_info.value)

    dfp.content = dedent("""\
        FROM monty as vikings
        # using an index we haven't seen should break:
        COPY --from=5 /spam/eggs /bin/eggs
    """)
    with pytest.raises(RuntimeError) as exc_info:
        build()
    assert "COPY --from=5" in str(exc_info.value)
def test_adddockerfile_nvr_from_labels2(tmpdir, docker_tasker,
                                        workflow):  # noqa
    """NVR for the added Dockerfile comes from labels set by AddLabelsPlugin."""
    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = "\nFROM fedora\nRUN yum install -y python-django\nCMD blabla"

    if MOCK:
        mock_docker()

    prepare(workflow, dockerfile.dockerfile_path)
    workflow.builder.set_inspection_data({INSPECT_CONFIG: {"Labels": {}}})

    plugin_conf = [
        {'name': AddLabelsPlugin.key,
         'args': {'labels': {'Name': 'jboss-eap-6-docker',
                             'Version': '6.4',
                             'Release': '77'},
                  'auto_labels': []}},
        {'name': AddDockerfilePlugin.key},
    ]
    PreBuildPluginsRunner(docker_tasker, workflow, plugin_conf).run()
    assert AddDockerfilePlugin.key is not None

    assert "ADD Dockerfile-jboss-eap-6-docker-6.4-77 /root/buildinfo/Dockerfile-jboss-eap-6-docker-6.4-77" in dockerfile.content  # noqa
def test_assertlabels_plugin(tmpdir, docker_tasker, df_content, req_labels, expected):
    """AssertLabelsPlugin passes or fails depending on the required labels."""
    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = df_content

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = X
    workflow.builder.df_path = dockerfile.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = PreBuildPluginsRunner(docker_tasker, workflow,
                                   [{'name': AssertLabelsPlugin.key,
                                     'args': {'required_labels': req_labels}}])

    assert AssertLabelsPlugin.key is not None

    if not isinstance(expected, PluginFailedException):
        runner.run()
    else:
        with pytest.raises(PluginFailedException):
            runner.run()
def test_add_labels_plugin_generated(tmpdir, docker_tasker, auto_label, value_re_part):
    """Auto-generated labels match the expected pattern / build timestamp."""
    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = DF_CONTENT

    if MOCK:
        mock_docker()

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    setattr(workflow, 'builder', X)
    flexmock(workflow, source=MockSource())
    flexmock(workflow, base_image_inspect=LABELS_CONF_BASE)
    setattr(workflow.builder, 'df_path', dockerfile.dockerfile_path)

    plugin_args = {'labels': {},
                   'dont_overwrite': [],
                   'auto_labels': [auto_label],
                   'aliases': {'Build_Host': 'com.redhat.build-host'}}
    runner = PreBuildPluginsRunner(docker_tasker, workflow,
                                   [{'name': AddLabelsPlugin.key,
                                     'args': plugin_args}])

    runner.run()
    if value_re_part:
        assert re.match(value_re_part, dockerfile.labels[auto_label])

    if auto_label == "build-date":
        utc_dt = datetime.datetime.utcfromtimestamp(atomic_reactor_start_time).isoformat()
        assert dockerfile.labels[auto_label] == utc_dt
# Example 18
    def run(self):
        """
        Check that this image's distribution scope does not exceed the
        parent image's; raise DisallowedDistributionScope if it does.
        """
        try:
            # Find out the intended scope for this image
            labels = df_parser(self.workflow.builder.df_path,
                               workflow=self.workflow).labels
            scope = self.get_scope('current', labels)

            # Find out the parent's intended scope
            inspect = self.workflow.builder.base_image_inspect
            parent_labels = {}
            # a FROM scratch image has no parent config to take labels from
            if not self.workflow.builder.base_from_scratch:
                parent_labels = inspect[INSPECT_CONFIG]['Labels']
            parent_scope = self.get_scope('parent', parent_labels)
        except NothingToCheck:
            # either side declared no scope; nothing to enforce
            self.log.debug("no checks performed")
            return

        # a child image may not request a broader scope than its parent
        if scope > parent_scope:
            error = (
                "{label}={scope} but parent has {label}={parent_scope}".format(
                    label=self.SCOPE_LABEL,
                    scope=self.SCOPE_NAME[scope],
                    parent_scope=self.SCOPE_NAME[parent_scope]))
            self.log.error("%s", error)
            raise DisallowedDistributionScope(error)

        self.log.info("distribution scope checked")
def test_add_labels_equal_aliases(tmpdir, docker_tasker, caplog,
                                  base_l, df_l, expected, expected_log,
                                  reactor_config_map):
    """Equal-labels handling keeps description and io.k8s.description in sync."""
    if MOCK:
        mock_docker()

    plugin_labels = {}
    df_lines = ["FROM fedora\n"]
    if df_l[0]:
        df_lines.append('LABEL description="{0}"\n'.format(df_l[0]))
    if df_l[1]:
        df_lines.append('LABEL io.k8s.description="{0}"\n'.format(df_l[1]))

    base_labels = {INSPECT_CONFIG: {"Labels": {}}}
    if base_l[0]:
        base_labels[INSPECT_CONFIG]["Labels"]["description"] = base_l[0]
    if base_l[1]:
        base_labels[INSPECT_CONFIG]["Labels"]["io.k8s.description"] = base_l[1]

    df = df_parser(str(tmpdir))
    df.content = "".join(df_lines)

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    setattr(workflow, 'builder', X())
    setattr(workflow.builder, 'df_path', df.dockerfile_path)
    setattr(workflow.builder, 'base_image_inspect', base_labels)

    if reactor_config_map:
        make_and_store_reactor_config_map(
            workflow,
            {'image_labels': plugin_labels,
             'image_equal_labels': [['description', 'io.k8s.description']]})

    plugin_args = {'labels': plugin_labels,
                   'dont_overwrite': [],
                   'auto_labels': [],
                   'aliases': {},
                   'equal_labels': [['description', 'io.k8s.description']]}
    runner = PreBuildPluginsRunner(docker_tasker, workflow,
                                   [{'name': AddLabelsPlugin.key,
                                     'args': plugin_args}])

    runner.run()
    assert AddLabelsPlugin.key is not None

    def final(label):
        # value set in the Dockerfile wins; otherwise fall back to the base image
        return df.labels.get(label) or base_labels[INSPECT_CONFIG]["Labels"].get(label)

    assert final("description") == expected[0]
    assert final("io.k8s.description") == expected[1]

    if expected_log:
        assert expected_log in caplog.text
# Example 20
def test_url_label(tmpdir, docker_tasker, caplog, url_format, info_url):
    """The url label is set from info_url_format, or the plugin run fails."""
    if MOCK:
        mock_docker()

    plugin_labels = {}
    base_labels = {INSPECT_CONFIG: {"Labels": {}}}
    df = df_parser(str(tmpdir))
    df.content = DF_CONTENT_LABELS

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    setattr(workflow, 'builder', X)
    flexmock(workflow, base_image_inspect=base_labels)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)

    plugin_conf = [{'name': AddLabelsPlugin.key,
                    'args': {'labels': plugin_labels,
                             'dont_overwrite': [],
                             'auto_labels': [],
                             'info_url_format': url_format}}]
    runner = PreBuildPluginsRunner(docker_tasker, workflow, plugin_conf)

    if info_url is None:
        with pytest.raises(PluginFailedException):
            runner.run()
    else:
        runner.run()
        assert df.labels.get("url") == info_url

    assert AddLabelsPlugin.key is not None
def test_store_metadata_fail_update_annotations(tmpdir, caplog):
    """
    When OSBS fails to set annotations, the plugin run fails and the
    attempted annotations are logged.
    """
    workflow = prepare()
    workflow.exit_results = {}
    df_content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(None, workflow,
                               [{
                                   'name': StoreMetadataInOSv3Plugin.key,
                                   "args": {
                                       "url": "http://example.com/"
                                   }
                               }])
    (flexmock(OSBS).should_receive('set_annotations_on_build').and_raise(
        OsbsResponseException('/', 'failed', 0)))
    with pytest.raises(PluginFailedException):
        runner.run()
    # pytest's caplog.text is a str property, not a callable; the old
    # caplog.text() raised TypeError instead of checking the log
    assert 'annotations:' in caplog.text
# Example 22
    def build(self):
        """
        build image inside current environment;
        it's expected this may run within (privileged) docker container

        :return: BuildResult when the build command completed (successfully
                 or not), ExceptionBuildResult when the build raised
        """
        try:
            logger.info("building image '%s' inside current environment", self.image)
            self._ensure_not_built()
            logger.debug("using dockerfile:\n%s", df_parser(self.df_path).content)
            logs_gen = self.tasker.build_image_from_path(
                self.df_dir,
                self.image,
            )
            logger.debug("build is submitted, waiting for it to finish")
            command_result = wait_for_command(logs_gen)  # wait for build to finish
            logger.info("build was %ssuccessful!", 'un' if command_result.is_failed() else '')
            self.is_built = True
            if not command_result.is_failed():
                self.built_image_info = self.get_built_image_info()
                # note: 'ParentId' is NOT the base image id, so only 'Id' is recorded
                self.image_id = self.built_image_info['Id']
            build_result = BuildResult(command_result, self.image_id)
            return build_result
        except Exception:
            # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # propagate instead of being reported as a failed build
            logger.exception("build failed")
            return ExceptionBuildResult()
def test_all_missing_required_labels(tmpdir, docker_tasker, caplog, df_content, req_labels):
    """
    AssertLabelsPlugin fails and logs a message listing every required
    label missing from the Dockerfile.
    """
    df = df_parser(str(tmpdir))
    df.content = df_content

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = X
    workflow.builder.df_path = df.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': AssertLabelsPlugin.key,
            'args': {'required_labels': req_labels}
        }]
    )

    assert AssertLabelsPlugin.key is not None

    with pytest.raises(PluginFailedException):
        runner.run()

    error_msg = "Dockerfile is missing required labels: {0}".format(req_labels)
    # pytest's caplog.text is a str property, not a callable; the old
    # caplog.text() raised TypeError instead of checking the log
    assert error_msg in caplog.text
# Example 24
def test_adddockerfile_plugin(tmpdir, docker_tasker):
    """An explicit nvr arg places the Dockerfile copy ADD before CMD."""
    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = "\nFROM fedora\nRUN yum install -y python-django\nCMD blabla"

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = X
    workflow.builder.df_path = dockerfile.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    PreBuildPluginsRunner(docker_tasker, workflow,
                          [{'name': AddDockerfilePlugin.key,
                            'args': {'nvr': 'rhel-server-docker-7.1-20'}}]).run()
    assert AddDockerfilePlugin.key is not None

    expected_output = (
        "\nFROM fedora"
        "\nRUN yum install -y python-django"
        "\nADD Dockerfile-rhel-server-docker-7.1-20 /root/buildinfo/Dockerfile-rhel-server-docker-7.1-20"
        "\nCMD blabla"
    )
    assert dockerfile.content == expected_output
# Example 25
def test_adddockerfile_todest(tmpdir, docker_tasker):
    """A custom destdir controls where the Dockerfile copy is ADDed."""
    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = "\nFROM fedora\nRUN yum install -y python-django\nCMD blabla"

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = X
    workflow.builder.df_path = dockerfile.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    PreBuildPluginsRunner(docker_tasker, workflow,
                          [{'name': AddDockerfilePlugin.key,
                            'args': {'nvr': 'jboss-eap-6-docker-6.4-77',
                                     'destdir': '/usr/share/doc/'}}]).run()
    assert AddDockerfilePlugin.key is not None

    expected_output = (
        "\nFROM fedora"
        "\nRUN yum install -y python-django"
        "\nADD Dockerfile-jboss-eap-6-docker-6.4-77 /usr/share/doc/Dockerfile-jboss-eap-6-docker-6.4-77"
        "\nCMD blabla"
    )
    assert dockerfile.content == expected_output
# Example 26
def test_adddockerfile_nvr_from_labels2(tmpdir, docker_tasker):
    """NVR comes from labels added by AddLabelsPlugin earlier in the chain."""
    dockerfile = df_parser(str(tmpdir))
    dockerfile.content = "\nFROM fedora\nRUN yum install -y python-django\nCMD blabla"

    if MOCK:
        mock_docker()

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    flexmock(workflow, base_image_inspect={INSPECT_CONFIG: {"Labels": {}}})
    workflow.builder = X
    workflow.builder.df_path = dockerfile.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    plugin_conf = [
        {'name': AddLabelsPlugin.key,
         'args': {'labels': {'Name': 'jboss-eap-6-docker',
                             'Version': '6.4',
                             'Release': '77'},
                  'auto_labels': []}},
        {'name': AddDockerfilePlugin.key},
    ]
    PreBuildPluginsRunner(docker_tasker, workflow, plugin_conf).run()
    assert AddDockerfilePlugin.key is not None

    assert "ADD Dockerfile-jboss-eap-6-docker-6.4-77 /root/buildinfo/Dockerfile-jboss-eap-6-docker-6.4-77" in dockerfile.content
# Example 27
def test_adddockerfile_nvr_from_labels(tmpdir, docker_tasker):
    """The Dockerfile name NVR is derived from LABEL instructions in the file."""
    parser = df_parser(str(tmpdir))
    parser.content = """
FROM fedora
RUN yum install -y python-django
LABEL Name="jboss-eap-6-docker" "Version"="6.4" "Release"=77
CMD blabla"""

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = X
    workflow.builder.df_path = parser.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = PreBuildPluginsRunner(docker_tasker, workflow,
                                   [{'name': AddDockerfilePlugin.key}])
    runner.run()
    assert AddDockerfilePlugin.key is not None

    assert "ADD Dockerfile-jboss-eap-6-docker-6.4-77 /root/buildinfo/Dockerfile-jboss-eap-6-docker-6.4-77" in parser.content
    def run(self):
        """Reject the build if this image widens its parent's distribution scope."""
        try:
            # Scope requested by the Dockerfile being built
            dfp = df_parser(self.workflow.builder.df_path, workflow=self.workflow)
            scope = self.get_scope('current', dfp.labels)

            # Scope granted by the parent image (empty labels when from scratch)
            inspect = self.workflow.builder.base_image_inspect
            parent_labels = {}
            if not self.workflow.builder.base_from_scratch:
                parent_labels = inspect[INSPECT_CONFIG]['Labels']
            parent_scope = self.get_scope('parent', parent_labels)
        except NothingToCheck:
            self.log.debug("no checks performed")
            return

        if scope > parent_scope:
            error = "{label}={scope} but parent has {label}={parent_scope}".format(
                label=self.SCOPE_LABEL,
                scope=self.SCOPE_NAME[scope],
                parent_scope=self.SCOPE_NAME[parent_scope])
            self.log.error("%s", error)
            raise DisallowedDistributionScope(error)

        self.log.info("distribution scope checked")
Beispiel #29
0
def test_yuminject_multiline(tmpdir):
    """InjectYumRepoPlugin handles multi-line RUN and appends add/remove lines."""
    dockerfile_text = """\
FROM fedora
RUN yum install -y httpd \
                   uwsgi
CMD blabla"""
    parser = df_parser(str(tmpdir))
    parser.content = dockerfile_text

    tasker, workflow = prepare(parser.dockerfile_path)

    metalink = r'https://mirrors.fedoraproject.org/metalink?repo=fedora-$releasever&arch=$basearch'  # noqa

    repo = OrderedDict((('name', 'my-repo'),
                        ('metalink', metalink),
                        ('enabled', 1),
                        ('gpgcheck', 0)))
    workflow.files[os.path.join(YUM_REPOS_DIR, DEFAULT_YUM_REPOFILE_NAME)] = \
        render_yum_repo(repo)

    runner = PreBuildPluginsRunner(tasker, workflow,
                                   [{'name': InjectYumRepoPlugin.key, 'args': {}}])
    runner.run()
    assert InjectYumRepoPlugin.key is not None

    expected_output = r"""FROM fedora
ADD atomic-reactor-repos/* '/etc/yum.repos.d/'
RUN yum install -y httpd                    uwsgi
CMD blabla
RUN rm -f '/etc/yum.repos.d/atomic-reactor-injected.repo'
"""
    assert parser.content == expected_output
Beispiel #30
0
    def __init__(self, source, image, **kwargs):
        """
        Set up the builder: query the docker daemon and read base-image
        information from the source's Dockerfile.

        :param source: build source object providing the Dockerfile
        :param image: str, name (optionally tagged) of the image to build
        :param kwargs: accepted for interface compatibility; unused here
        """
        LastLogger.__init__(self)
        BuilderStateMachine.__init__(self)

        print_version_of_tools()

        self.tasker = DockerTasker()

        # log docker daemon info/version up front for debugging build issues
        info, version = self.tasker.get_info(), self.tasker.get_version()
        logger.debug(json.dumps(info, indent=2))
        logger.info(json.dumps(version, indent=2))

        # arguments for build
        self.source = source
        self.base_image_id = None
        self.image_id = None
        self.built_image_info = None
        self.image = ImageName.parse(image)

        # get info about base image from dockerfile
        self.df_path, self.df_dir = self.source.get_dockerfile_path()
        self.set_base_image(df_parser(self.df_path).baseimage)
        logger.debug("base image specified in dockerfile = '%s'", self.base_image)
        # an untagged base image implicitly means 'latest'
        if not self.base_image.tag:
            self.base_image.tag = 'latest'
def test_assertlabels_plugin(tmpdir, docker_tasker, df_content, req_labels, expected):
    """AssertLabelsPlugin fails the build when required labels are missing."""
    parser = df_parser(str(tmpdir))
    parser.content = df_content

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = X
    workflow.builder.df_path = parser.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    plugin_conf = [{'name': AssertLabelsPlugin.key,
                    'args': {'required_labels': req_labels}}]
    runner = PreBuildPluginsRunner(docker_tasker, workflow, plugin_conf)

    assert AssertLabelsPlugin.key is not None

    if isinstance(expected, PluginFailedException):
        with pytest.raises(PluginFailedException):
            runner.run()
    else:
        runner.run()
def test_metadata_plugin_rpmqa_failure(tmpdir, reactor_config_map):  # noqa
    """A failed rpmqa plugin still shows up in the stored plugins-metadata."""
    start = datetime.now()
    workflow = prepare(reactor_config_map=reactor_config_map)
    parser = df_parser(str(tmpdir))
    parser.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    workflow.builder.df_path = parser.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    timestamp = (start + timedelta(seconds=3)).isoformat()
    workflow.prebuild_results = {}
    workflow.postbuild_results = {
        PostBuildRPMqaPlugin.key: RuntimeError(),
        PLUGIN_KOJI_UPLOAD_PLUGIN_KEY: {'metadata_fragment_key': 'metadata.json',
                                        'metadata_fragment': 'configmap/build-1-md'}
    }
    workflow.plugins_timestamps = {
        PostBuildRPMqaPlugin.key: timestamp,
        PLUGIN_KOJI_UPLOAD_PLUGIN_KEY: timestamp,
    }
    workflow.plugins_durations = {
        PostBuildRPMqaPlugin.key: 3.03,
        PLUGIN_KOJI_UPLOAD_PLUGIN_KEY: 3.03,
    }
    workflow.plugins_errors = {
        PostBuildRPMqaPlugin.key: 'foo',
        PLUGIN_KOJI_UPLOAD_PLUGIN_KEY: 'bar',
    }

    runner = ExitPluginsRunner(None, workflow,
                               [{'name': StoreMetadataInOSv3Plugin.key,
                                 "args": {"url": "http://example.com/"}}])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    for key in ("dockerfile", "repositories", "commit_id", "base-image-id",
                "base-image-name", "image-id", "metadata_fragment",
                "metadata_fragment_key", "plugins-metadata"):
        assert key in annotations
    for key in ("errors", "durations", "timestamps"):
        assert key in annotations["plugins-metadata"]

    plugins_metadata = json.loads(annotations["plugins-metadata"])
    assert "all_rpm_packages" in plugins_metadata["errors"]
    assert "all_rpm_packages" in plugins_metadata["durations"]
def test_copy_from_is_blocked(tmpdir):
    """test when user has specified COPY --from=image (instead of builder)"""
    dfp = df_parser(str(tmpdir))
    if MOCK:
        mock_docker()
    source = {'provider': 'path', 'uri': 'file://' + str(tmpdir), 'tmpdir': str(tmpdir)}

    def build():
        # InsideBuilder.__init__ calls set_df_path, which validates COPY --from
        return InsideBuilder(get_source_instance_for(source), 'built-img')

    # Known stage names and in-range indexes must be accepted:
    dfp.content = dedent("""\
        FROM monty AS vikings
        FROM python
        COPY --from=vikings /spam/eggs /bin/eggs
        COPY --from=0 /spam/eggs /bin/eggs
        COPY src dest
    """)
    build()

    # An unknown stage name must be rejected:
    dfp.content = dedent("""\
        FROM monty as vikings
        FROM python
        # using a stage name we haven't seen should break:
        COPY --from=notvikings /spam/eggs /bin/eggs
    """)
    with pytest.raises(RuntimeError) as exc_info:
        build()
    assert "FROM notvikings AS source" in str(exc_info.value)

    # An out-of-range stage index must be rejected:
    dfp.content = dedent("""\
        FROM monty as vikings
        # using an index we haven't seen should break:
        COPY --from=5 /spam/eggs /bin/eggs
    """)
    with pytest.raises(RuntimeError) as exc_info:
        build()
    assert "COPY --from=5" in str(exc_info.value)
Beispiel #34
0
    def run(self):
        """
        Export the built filesystem and wrap it into a flatpak OCI image.

        Appends exported-image metadata for both the OCI directory and its
        tarball to workflow.exported_image_sequence.

        :raises RuntimeError: when flatpak_create_dockerfile has not run first
        """
        source = get_flatpak_source_info(self.workflow)
        if source is None:
            raise RuntimeError("flatpak_create_dockerfile must be run before flatpak_create_oci")

        self.builder = FlatpakBuilder(source, self.workflow.source.workdir,
                                      'var/tmp/flatpak-build',
                                      parse_manifest=parse_rpm_output,
                                      flatpak_metadata=self.flatpak_metadata)

        # carry the Dockerfile labels over onto the flatpak image
        df_labels = df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels
        self.builder.add_labels(df_labels)

        tarred_filesystem, manifest = self._export_filesystem()
        self.log.info('filesystem tarfile written to %s', tarred_filesystem)
        self.log.info('manifest written to %s', manifest)

        # record which RPM components ended up inside the image
        image_components = self.builder.get_components(manifest)
        self.workflow.image_components = image_components

        ref_name, outfile, tarred_outfile = self.builder.build_container(tarred_filesystem)

        # register the OCI directory output
        metadata = get_exported_image_metadata(outfile, IMAGE_TYPE_OCI)
        metadata['ref_name'] = ref_name
        self.workflow.exported_image_sequence.append(metadata)

        self.log.info('OCI image is available as %s', outfile)

        # register the tarred OCI output
        metadata = get_exported_image_metadata(tarred_outfile, IMAGE_TYPE_OCI_TAR)
        metadata['ref_name'] = ref_name
        self.workflow.exported_image_sequence.append(metadata)

        self.log.info('OCI tarfile is available as %s', tarred_outfile)
def test_labels_metadata_plugin(tmpdir, koji_plugin):
    """The koji build id from exit results ends up in the stored labels."""
    koji_build_id = 1234
    workflow = prepare()
    parser = df_parser(str(tmpdir))
    parser.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    workflow.builder = X
    workflow.builder.df_path = parser.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    workflow.exit_results = {koji_plugin: koji_build_id}

    runner = ExitPluginsRunner(None, workflow,
                               [{'name': StoreMetadataInOSv3Plugin.key,
                                 "args": {"url": "http://example.com/"}}])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" in labels
    assert is_string_type(labels["koji-build-id"])
    assert int(labels["koji-build-id"]) == koji_build_id
Beispiel #36
0
    def test_append(self, tmpdir, base_release, builds, expected,
                    reactor_config_map):
        """In append mode the release label is bumped past existing koji builds."""
        class FakeSession(object):
            def __init__(self, hub, opts=None):
                pass

            def getBuild(self, build_info):
                # pretend the build exists iff its release is in `builds`
                return True if build_info['release'] in builds else None

            def krb_login(self, *args, **kwargs):
                return True

        flexmock(koji, ClientSession=FakeSession(''))

        labels = {'com.redhat.component': 'component1', 'version': 'fc26'}
        if base_release:
            labels['release'] = base_release

        plugin = self.prepare(tmpdir, labels=labels, append=True,
                              reactor_config_map=reactor_config_map)
        plugin.run()

        dfp = df_parser(plugin.workflow.builder.df_path,
                        workflow=plugin.workflow)
        assert dfp.labels['release'] == expected
def test_filter_nonpulp_repositories(tmpdir, pulp_registries,
                                     docker_registries, is_orchestrator,
                                     expected):
    """Only the expected repositories survive pulp/docker registry filtering."""
    workflow = prepare(pulp_registries=pulp_registries,
                       docker_registries=docker_registries)
    parser = df_parser(str(tmpdir))
    parser.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    workflow.builder = X
    workflow.builder.df_path = parser.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    # orchestrator builds are detected via a buildstep result
    if is_orchestrator:
        workflow.buildstep_result[OrchestrateBuildPlugin.key] = 'foo'

    runner = ExitPluginsRunner(None, workflow,
                               [{'name': StoreMetadataInOSv3Plugin.key,
                                 "args": {"url": "http://example.com/"}}])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    annotations = output[StoreMetadataInOSv3Plugin.key]["annotations"]
    assert json.loads(annotations['repositories']) == expected
def test_adddockerfile_plugin(tmpdir, docker_tasker, workflow):  # noqa
    """AddDockerfilePlugin adds the Dockerfile under /root/buildinfo by default."""
    parser = df_parser(str(tmpdir))
    parser.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""

    prepare(workflow, parser.dockerfile_path)

    runner = PreBuildPluginsRunner(
        docker_tasker, workflow,
        [{'name': AddDockerfilePlugin.key,
          'args': {'nvr': 'rhel-server-docker-7.1-20'}}])
    runner.run()
    assert AddDockerfilePlugin.key is not None

    expected_output = """
FROM fedora
RUN yum install -y python-django
ADD Dockerfile-rhel-server-docker-7.1-20 /root/buildinfo/Dockerfile-rhel-server-docker-7.1-20
CMD blabla"""
    assert parser.content == expected_output
def test_add_labels_plugin(tmpdir, docker_tasker, df_content, labels_conf_base,
                           labels_conf, dont_overwrite, aliases,
                           expected_output, caplog):
    # Parametrized check of AddLabelsPlugin: labels from the Dockerfile,
    # plugin args and the base image are merged per dont_overwrite/aliases.
    df = df_parser(str(tmpdir))
    df.content = df_content

    if MOCK:
        mock_docker()

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    setattr(workflow, 'builder', X)
    flexmock(workflow, base_image_inspect=labels_conf_base)
    setattr(workflow.builder, 'df_path', df.dockerfile_path)

    runner = PreBuildPluginsRunner(docker_tasker, workflow,
                                   [{
                                       'name': AddLabelsPlugin.key,
                                       'args': {
                                           'labels': labels_conf,
                                           'dont_overwrite': dont_overwrite,
                                           'auto_labels': [],
                                           'aliases': aliases,
                                       }
                                   }])

    runner.run()
    if isinstance(expected_output, RuntimeError):
        # NOTE(review): caplog.text is a property in current pytest; calling
        # it only works on old pytest versions -- confirm the pinned version.
        assert "plugin 'add_labels_in_dockerfile' raised an exception: RuntimeError" in caplog.text(
        )

    else:
        assert AddLabelsPlugin.key is not None
        # NOTE(review): 'df.content in expected_output' reads reversed
        # (substring check of the content inside the expected value) -- verify
        # this is intended rather than 'expected_output in df.content'.
        assert df.content in expected_output
def test_adddockerfile_todest(tmpdir, docker_tasker, workflow):  # noqa
    """AddDockerfilePlugin honours an explicit 'destdir' argument."""
    parser = df_parser(str(tmpdir))
    parser.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""

    prepare(workflow, parser.dockerfile_path)

    runner = PreBuildPluginsRunner(
        docker_tasker, workflow,
        [{'name': AddDockerfilePlugin.key,
          'args': {'nvr': 'jboss-eap-6-docker-6.4-77',
                   'destdir': '/usr/share/doc/'}}])
    runner.run()
    assert AddDockerfilePlugin.key is not None

    expected_output = """
FROM fedora
RUN yum install -y python-django
ADD Dockerfile-jboss-eap-6-docker-6.4-77 /usr/share/doc/Dockerfile-jboss-eap-6-docker-6.4-77
CMD blabla"""
    assert parser.content == expected_output
    def test_append(self, tmpdir, base_release, builds, expected):
        """In append mode the release label is bumped past existing koji builds."""
        class FakeSession(object):
            def __init__(self, hub, opts=None):
                pass

            def getBuild(self, build_info):
                # pretend the build exists iff its release is in `builds`
                return True if build_info['release'] in builds else None

        flexmock(koji, ClientSession=FakeSession(''))

        labels = {'com.redhat.component': 'component1', 'version': 'fc26'}
        if base_release:
            labels['release'] = base_release

        plugin = self.prepare(tmpdir, labels=labels, append=True)
        plugin.run()

        dfp = df_parser(plugin.workflow.builder.df_path, workflow=plugin.workflow)
        assert dfp.labels['release'] == expected
Beispiel #42
0
def test_multistage_dockerfiles(name, inherited_user, dockerfile, expect_cleanup_lines,
                                base_from_scratch, tmpdir):
    """Repo ADD lines appear where marked and cleanup RUN lines are appended."""
    # Split on the "### ADD HERE" marker lines: those mark the spots where the
    # plugin is expected to insert ADD instructions.
    parts = re.split(r'^.*ADD HERE.*$\n?', dockerfile, flags=re.M)
    part_lines = [part.splitlines(True) for part in parts]

    # Interleave the expected ADD line between the parts to build the
    # expected Dockerfile content.
    add_line = "ADD %s* '/etc/yum.repos.d/'\n" % RELATIVE_REPOS_PATH
    for lines in part_lines[:-1]:
        lines.append(add_line)
    expected_lines = list(itertools.chain.from_iterable(part_lines))

    # Run the plugin on the Dockerfile without the marker lines.
    parser = df_parser(str(tmpdir))
    parser.content = ''.join(parts)
    tasker, workflow = prepare(parser.dockerfile_path, inherited_user)
    workflow.builder.set_base_from_scratch(base_from_scratch)
    repo_file = 'myrepo.repo'
    workflow.files[os.path.join(YUM_REPOS_DIR, repo_file)] = repocontent
    runner = PreBuildPluginsRunner(tasker, workflow,
                                   [{'name': InjectYumRepoPlugin.key, 'args': {}}])
    runner.run()

    # Everything up to the expected length must match exactly...
    new_lines = parser.lines
    assert new_lines[:len(expected_lines)] == expected_lines

    # ...and whatever follows must be recognizable cleanup lines.
    assert remove_lines_match(new_lines[len(expected_lines):],
                              expect_cleanup_lines, [repo_file])
def test_update_base_image(organization, tmpdir, reactor_config_map,
                           docker_tasker):
    """The FROM line is rewritten to the resolved base image digest."""
    template = dedent("""\
        FROM {}
        LABEL horses=coconuts
        CMD whoah
    """)
    dfp = df_parser(str(tmpdir))
    image_str = "base:image"
    dfp.content = template.format(image_str)
    base_str = "base@sha256:1234"
    base_image_name = ImageName.parse("base@sha256:1234")

    # the parent-image key may be enclosed in an organization namespace
    enclosed_parent = ImageName.parse(image_str)
    if organization and reactor_config_map:
        enclosed_parent.enclose(organization)

    workflow = mock_workflow()
    workflow.builder.set_df_path(dfp.dockerfile_path)
    workflow.builder.parent_images = {enclosed_parent: base_image_name}
    workflow.builder.base_image = base_image_name
    workflow.builder.set_parent_inspection_data(base_str, dict(Id=base_str))
    workflow.builder.tasker.inspect_image = lambda *_: dict(Id=base_str)

    run_plugin(workflow, reactor_config_map, docker_tasker,
               organization=organization)
    assert dfp.content == template.format(base_str)
Beispiel #44
0
 def set_df_path(self, path):
     """Record the Dockerfile path and (re)read its base image."""
     self._df_path = path
     self.set_base_image(df_parser(path).baseimage)
     logger.debug("base image specified in dockerfile = '%s'",
                  self.base_image)
     # an untagged base image implicitly means 'latest'
     if not self.base_image.tag:
         self.base_image.tag = 'latest'
def test_missing_koji_build_id(tmpdir):
    """No koji-build-id label is stored when koji produced no exit result."""
    workflow = prepare()
    workflow.exit_results = {}
    parser = df_parser(str(tmpdir))
    parser.content = """
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    workflow.builder = X
    workflow.builder.df_path = parser.dockerfile_path
    workflow.builder.df_dir = str(tmpdir)

    runner = ExitPluginsRunner(None, workflow,
                               [{'name': StoreMetadataInOSv3Plugin.key,
                                 "args": {"url": "http://example.com/"}}])
    output = runner.run()
    assert StoreMetadataInOSv3Plugin.key in output
    labels = output[StoreMetadataInOSv3Plugin.key]["labels"]
    assert "koji-build-id" not in labels
Beispiel #46
0
    def __init__(self, source, image, **kwargs):
        """
        Set up the builder: query the docker daemon and read base-image
        information from the source's Dockerfile.

        :param source: build source object providing the Dockerfile
        :param image: str, name (optionally tagged) of the image to build
        :param kwargs: accepted for interface compatibility; unused here
        """
        LastLogger.__init__(self)
        BuilderStateMachine.__init__(self)

        print_version_of_tools()

        self.tasker = DockerTasker()

        # log docker daemon info/version up front for debugging build issues
        info, version = self.tasker.get_info(), self.tasker.get_version()
        logger.debug(json.dumps(info, indent=2))
        logger.info(json.dumps(version, indent=2))

        # arguments for build
        self.source = source
        self.base_image_id = None
        self.image_id = None
        self.built_image_info = None
        self.image = ImageName.parse(image)

        # get info about base image from dockerfile
        self.df_path, self.df_dir = self.source.get_dockerfile_path()
        self.set_base_image(df_parser(self.df_path).baseimage)
        logger.debug("base image specified in dockerfile = '%s'",
                     self.base_image)
        # an untagged base image implicitly means 'latest'
        if not self.base_image.tag:
            self.base_image.tag = 'latest'
Beispiel #47
0
    def build(self):
        """
        build image inside current environment;
        it's expected this may run within (privileged) docker container

        :return: BuildResult for a submitted build (failed or succeeded);
                 ExceptionBuildResult when submitting/waiting itself raised
        """
        try:
            logger.info("building image '%s' inside current environment",
                        self.image)
            self._ensure_not_built()
            logger.debug("using dockerfile:\n%s",
                         df_parser(self.df_path).content)
            logs_gen = self.tasker.build_image_from_path(
                self.df_dir,
                self.image,
            )
            logger.debug("build is submitted, waiting for it to finish")
            command_result = wait_for_command(
                logs_gen)  # wait for build to finish
            logger.info(
                "build %s!",
                'failed' if command_result.is_failed() else 'succeeded')
            self.is_built = True
            if not command_result.is_failed():
                self.built_image_info = self.get_built_image_info()
                # self.base_image_id = self.built_image_info['ParentId']  # parent id is not base image!
                self.image_id = self.built_image_info['Id']
            build_result = BuildResult(command_result, self.image_id)
            return build_result
        except Exception:
            # was a bare 'except:', which also swallowed KeyboardInterrupt
            # and SystemExit; narrowed to Exception so those propagate
            logger.exception("build failed")
            return ExceptionBuildResult()
    def run(self):
        """
        run the plugin

        Inject configured yum repo files into the build context and the
        Dockerfile so they are available during the build.
        """
        yum_repos = {k: v for k, v in self.workflow.files.items() if k.startswith(YUM_REPOS_DIR)}
        if self.wrap_commands:
            # wrap each yum command in the Dockerfile with the injected repos
            wrap_yum_commands(yum_repos, self.workflow.builder.df_path)
        else:
            if not yum_repos:
                return
            # absolute path in containers -> relative path within context
            repos_host_cont_mapping = {}
            host_repos_path = os.path.join(self.workflow.builder.df_dir, RELATIVE_REPOS_PATH)
            self.log.info("creating directory for yum repos: %s", host_repos_path)
            os.mkdir(host_repos_path)

            # NOTE(review): this iterates ALL workflow.files, not only the
            # YUM_REPOS_DIR subset filtered into yum_repos above -- confirm
            # whether non-repo files are expected here.
            for repo, repo_content in self.workflow.files.items():
                repo_basename = os.path.basename(repo)
                repo_relative_path = os.path.join(RELATIVE_REPOS_PATH, repo_basename)
                repo_host_path = os.path.join(host_repos_path, repo_basename)
                self.log.info("writing repo to '%s'", repo_host_path)
                with open(repo_host_path, "wb") as fp:
                    fp.write(repo_content.encode("utf-8"))
                self.log.debug("%s\n%s", repo, repo_content.strip())
                repos_host_cont_mapping[repo] = repo_relative_path

            # Find out the USER inherited from the base image
            inspect = self.workflow.builder.inspect_base_image()
            inherited_user = inspect[INSPECT_CONFIG].get('User', '')
            df = df_parser(self.workflow.builder.df_path, workflow=self.workflow)
            df.lines = add_yum_repos_to_dockerfile(repos_host_cont_mapping,
                                                   df, inherited_user)
def test_add_labels_plugin_explicit(tmpdir, docker_tasker, auto_label, labels_docker, labels_base):
    """An explicitly provided label value wins over the auto-generated one."""
    parser = df_parser(str(tmpdir))
    parser.content = labels_docker

    if MOCK:
        mock_docker()

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    setattr(workflow, 'builder', X)
    flexmock(workflow, source=MockSource())
    flexmock(workflow, base_image_inspect=labels_base)
    setattr(workflow.builder, 'df_path', parser.dockerfile_path)

    explicit_labels = {auto_label: 'explicit_value'}

    runner = PreBuildPluginsRunner(
        docker_tasker, workflow,
        [{'name': AddLabelsPlugin.key,
          'args': {'labels': explicit_labels, "dont_overwrite": [], "auto_labels": [auto_label],
                   'aliases': {'Build_Host': 'com.redhat.build-host'}}}])

    runner.run()

    assert parser.labels[auto_label] == 'explicit_value'
 def run(self):
     """
     try open dockerfile, output an error if there is one
     """
     try:
         return df_parser(self.workflow.builder.df_path, workflow=self.workflow).content
     except (IOError, OSError) as ex:
         # best-effort: report the failure as the plugin result instead of raising
         return "Couldn't retrieve dockerfile: %r" % ex
def test_add_labels_aliases(tmpdir, docker_tasker, caplog,
                            df_old_as_plugin_arg, df_new_as_plugin_arg,
                            base_old, base_new, df_old, df_new, exp_old, exp_new, exp_log):
    """Aliased labels must resolve consistently whether they arrive via the
    Dockerfile, the plugin args, or the base image."""
    if MOCK:
        mock_docker()

    plugin_labels = {}
    df_content = "FROM fedora\n"
    # Each label can be delivered either as a plugin argument or as a
    # LABEL instruction in the Dockerfile, depending on the parametrization.
    for name, value, as_arg in (("label_old", df_old, df_old_as_plugin_arg),
                                ("label_new", df_new, df_new_as_plugin_arg)):
        if not value:
            continue
        if as_arg:
            plugin_labels[name] = value
        else:
            df_content += 'LABEL {0}="{1}"\n'.format(name, value)

    base_labels = {INSPECT_CONFIG: {"Labels": {}}}
    inherited = base_labels[INSPECT_CONFIG]["Labels"]
    if base_old:
        inherited["label_old"] = base_old
    if base_new:
        inherited["label_new"] = base_new

    df = df_parser(str(tmpdir))
    df.content = df_content

    workflow = DockerBuildWorkflow(MOCK_SOURCE, 'test-image')
    workflow.builder = X
    flexmock(workflow, base_image_inspect=base_labels)
    workflow.builder.df_path = df.dockerfile_path

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{'name': AddLabelsPlugin.key,
          'args': {'labels': plugin_labels,
                   'dont_overwrite': [],
                   'auto_labels': [],
                   'aliases': {"label_old": "label_new"}}}])
    runner.run()

    assert AddLabelsPlugin.key is not None

    def resolved(label):
        # A value written into the Dockerfile wins; otherwise fall back
        # to whatever the base image carried.
        return df.labels.get(label) or inherited.get(label)

    assert resolved("label_old") == exp_old
    assert resolved("label_new") == exp_new

    if exp_log:
        assert exp_log in caplog.text()
    def run(self):
        """
        run the plugin
        """
        parser = df_parser(self.workflow.builder.df_path, workflow=self.workflow)
        dockerfile_labels = parser.labels
        release_labels = get_all_label_keys('release')

        # Nothing to do when the author pinned the release themselves.
        if any(label in dockerfile_labels for label in release_labels):
            self.log.debug("release set explicitly so not incrementing")
            return

        def required_label(name):
            # Look up a mandatory label under its preferred key.
            key = get_preferred_label_key(dockerfile_labels, name)
            try:
                return dockerfile_labels[key]
            except KeyError:
                raise RuntimeError("missing label: {}".format(key))

        component = required_label('com.redhat.component')
        version = required_label('version')

        build_info = {'name': component, 'version': version}
        self.log.debug('getting next release from build info: %s', build_info)
        next_release = self.xmlrpc.getNextRelease(build_info)

        # getNextRelease reports the release after the last *successful*
        # build, but that NVR may already belong to a failed build known to
        # Koji; CGImport refuses re-uploads, so probe and bump until free.
        while True:
            candidate = dict(build_info, release=next_release)
            self.log.debug('checking that the build does not exist: %s', candidate)
            if not self.xmlrpc.getBuild(candidate):
                break
            next_release = str(int(next_release) + 1)

        # The preferred release label is always written; the whole set is
        # written when the Dockerfile also uses the old-style label names.
        targets = [get_preferred_label_key(dockerfile_labels, 'release')]
        if get_all_label_keys('com.redhat.component')[1] in dockerfile_labels.keys():
            targets = release_labels

        for label in targets:
            self.log.info("setting %s=%s", label, next_release)

            # Assigning through the parser's labels mapping rewrites the file
            # (this is a property setter)
            dockerfile_labels[label] = next_release
 def _get_component_label(self):
     """Get value of Dockerfile label that is to be used as `global_component` to query
     PDC release-components API endpoint.

     :return: str, value of the configured component label
     :raises PluginFailedException: when the label is absent from the Dockerfile
     """
     labels = df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels
     if self.pdc_component_df_label not in labels:
         # Exceptions do not perform logger-style lazy %-formatting of extra
         # args, so interpolate the message before raising; the original code
         # passed the label name as a second constructor argument and the %s
         # was never substituted.
         raise PluginFailedException('No %s label in Dockerfile, can\'t get PDC component'
                                     % self.pdc_component_df_label)
     return labels[self.pdc_component_df_label]
 def run(self):
     """
     Verify that every required label is present in the Dockerfile;
     fail the build on the first one that is missing.
     """
     labels = df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels
     missing = (label for label in self.required_labels if labels.get(label) is None)
     for label in missing:
         msg = "Dockerfile is missing '{0}' label.".format(label)
         self.log.error(msg)
         raise AssertionError(msg)