def test_check_is_rebuild(self, caplog, tmpdir, monkeypatch,
                              reactor_config_map, base_from_scratch,
                              custom_base, from_latest):
        key = 'is_autorebuild'
        value = 'true'

        workflow, runner = self.prepare(tmpdir,
                                        key,
                                        value,
                                        reactor_config_map=reactor_config_map,
                                        base_from_scratch=base_from_scratch,
                                        custom_base=custom_base)

        monkeypatch.setenv(
            "BUILD",
            json.dumps({
                "metadata": {
                    "labels": {
                        "buildconfig": "buildconfig1",
                        key: value,
                        'git-branch': 'the-branch',
                    }
                }
            }))

        mock_times = 1 if from_latest and not (base_from_scratch
                                               or custom_base) else 0
        (flexmock(workflow.source).should_call('reset').times(
            mock_times).with_args('origin/the-branch'))

        (flexmock(build_orchestrate_build).should_receive(
            'override_build_kwarg').times(mock_times).with_args(
                workflow, 'git_ref', 'HEAD-OF-origin/the-branch'))

        workflow.source.config.autorebuild = dict(from_latest=from_latest)

        runner.run()
        if base_from_scratch:
            assert workflow.prebuild_results[
                CheckAndSetRebuildPlugin.key] is False
            assert not is_rebuild(workflow)
            log_msg = "Skipping check and set rebuild: unsupported for FROM-scratch images"
            assert log_msg in caplog.text
        elif custom_base:
            assert workflow.prebuild_results[
                CheckAndSetRebuildPlugin.key] is False
            assert not is_rebuild(workflow)
            log_msg = "Skipping check and set rebuild: unsupported for custom base images"
            assert log_msg in caplog.text
        else:
            assert workflow.prebuild_results[
                CheckAndSetRebuildPlugin.key] is True
            assert is_rebuild(workflow)
    def test_check_is_rebuild(self, tmpdir, monkeypatch, reactor_config_map, from_latest):
        key = 'is_autorebuild'
        value = 'true'

        workflow, runner = self.prepare(tmpdir, key, value, reactor_config_map=reactor_config_map)

        monkeypatch.setenv("BUILD", json.dumps({
            "metadata": {
                "labels": {
                    "buildconfig": "buildconfig1",
                    key: value,
                    'git-branch': 'the-branch',
                }
            }
        }))

        (flexmock(workflow.source)
            .should_call('reset')
            .times(1 if from_latest is True else 0)
            .with_args('origin/the-branch'))

        (flexmock(build_orchestrate_build)
            .should_receive('override_build_kwarg')
            .times(1 if from_latest is True else 0)
            .with_args(workflow, 'git_ref', 'HEAD-OF-origin/the-branch'))

        workflow.source.config.autorebuild = dict(from_latest=from_latest)

        runner.run()
        assert workflow.prebuild_results[CheckAndSetRebuildPlugin.key] is True
        assert is_rebuild(workflow)
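For orientation, here is a minimal sketch of the is_rebuild helper these tests exercise. This is an assumption drawn from the assertions above, not the helper's actual source:

def is_rebuild(workflow):
    # Hypothetical sketch: the real helper ships with atomic-reactor.
    # The tests above assert that the CheckAndSetRebuildPlugin result and
    # is_rebuild() agree, so the helper presumably just reads that result.
    return bool(workflow.prebuild_results.get(CheckAndSetRebuildPlugin.key))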
Example #3
def test_check_is_not_rebuild(namespace, monkeypatch):
    key = 'is_autorebuild'
    value = 'true'
    buildconfig = "buildconfig1"
    namespace_dict = {}
    if namespace is not None:
        namespace_dict["namespace"] = namespace

    workflow, runner = prepare(key,
                               value,
                               set_labels_args=(buildconfig, {
                                   key: value
                               }),
                               set_labels_kwargs=namespace_dict)

    build_json = {
        "metadata": {
            "labels": {
                "buildconfig": buildconfig,
                key: "false",
            }
        }
    }

    build_json["metadata"].update(namespace_dict)
    monkeypatch.setenv("BUILD", json.dumps(build_json))
    runner.run()
    assert workflow.prebuild_results[CheckAndSetRebuildPlugin.key] is False
    assert not is_rebuild(workflow)
    def run(self):
        # verify that given states are subset of allowed states
        unknown_states = set(self.send_on) - self.allowed_states
        if len(unknown_states) > 0:
            raise PluginFailedException('Unknown state(s) "%s" for sendmail plugin' %
                                        '", "'.join(sorted(unknown_states)))

        rebuild = is_rebuild(self.workflow)
        success = not self.workflow.build_failed
        canceled = self.workflow.autorebuild_canceled

        self.log.info('checking conditions for sending notification ...')
        if self._should_send(rebuild, success, canceled):
            self.log.info('notification about build result will be sent')
            subject, body = self._render_mail(rebuild, success, canceled)
            try:
                self.log.debug('getting list of receivers for this component ...')
                receivers = self._get_receivers_list()
            except RuntimeError as e:
                self.log.error('couldn\'t get list of receivers, sending error message ...')
                # TODO: maybe improve the error message/subject
                body = '\n'.join([
                    'Failed to get contact for %s, error: %s' % (str(self.workflow.image), str(e)),
                    'Since your address is in "error_addresses", this email was sent to you to '
                    'take action on this.',
                    'Wanted to send following mail:',
                    '',
                    body
                ])
                receivers = self.error_addresses
            self.log.info('sending notification to %s ...', receivers)
            self._send_mail(receivers, subject, body)
        else:
            self.log.info('conditions for sending notification not met, doing nothing')
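The _should_send decision is not shown in this snippet. A hedged sketch of one plausible implementation, assuming send_on holds state names such as 'auto_success' drawn from allowed_states:

    def _should_send(self, rebuild, success, canceled):
        # Hedged sketch, not the plugin's actual code: map each assumed
        # state name to the condition under which mail should go out.
        mapping = {
            'manual_success': not rebuild and success,
            'manual_fail': not rebuild and not success,
            'auto_success': rebuild and success,
            'auto_fail': rebuild and not success,
            'auto_canceled': rebuild and canceled,
        }
        return any(mapping[state] for state in self.send_on)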
    def test_check_is_not_rebuild(self, tmpdir, namespace, monkeypatch, reactor_config_map,
                                  user_params):
        key = 'is_autorebuild'
        value = 'true'
        buildconfig = "buildconfig1"
        namespace_dict = {}
        if namespace is not None:
            namespace_dict["namespace"] = namespace

        workflow, runner = self.prepare(tmpdir, key, value,
                                        update_labels_args=(buildconfig,
                                                            {key: value}),
                                        update_labels_kwargs=namespace_dict,
                                        reactor_config_map=reactor_config_map)

        build_json = {
            "metadata": {
                "labels": {
                    "buildconfig": buildconfig,
                    key: "false",
                }
            }
        }

        build_json["metadata"].update(namespace_dict)
        monkeypatch.setenv("BUILD", json.dumps(build_json))
        runner.run()
        assert workflow.prebuild_results[CheckAndSetRebuildPlugin.key] is False
        assert not is_rebuild(workflow)
    def test_check_is_not_rebuild(self, namespace, monkeypatch):
        key = 'is_autorebuild'
        value = 'true'
        buildconfig = "buildconfig1"
        namespace_dict = {}
        if namespace is not None:
            namespace_dict["namespace"] = namespace

        workflow, runner = self.prepare(key, value,
                                        set_labels_args=(buildconfig,
                                                         {key: value}),
                                        set_labels_kwargs=namespace_dict)

        build_json = {
            "metadata": {
                "labels": {
                    "buildconfig": buildconfig,
                    key: "false",
                }
            }
        }
 
        build_json["metadata"].update(namespace_dict)
        monkeypatch.setenv("BUILD", json.dumps(build_json))
        runner.run()
        assert workflow.prebuild_results[CheckAndSetRebuildPlugin.key] is False
        assert not is_rebuild(workflow)
Example #8
    def should_run(self):
        """
        Check if the plugin should run or skip execution.

        :return: bool, False if plugin should skip execution
        """
        if not self.is_in_orchestrator():
            self.log.warning("%s plugin set to run on worker. Skipping",
                             self.key)
            return False

        if not get_omps_config(self.workflow, None):
            self.log.info("Integration with OMPS is not configured. Skipping")
            return False

        if not has_operator_manifest(self.workflow):
            self.log.info("Not an operator build. Skipping")
            return False

        if is_scratch_build():
            self.log.info('Scratch build. Skipping')
            return False

        if is_rebuild(self.workflow):
            self.log.info('Autorebuild. Skipping')
            return False

        if is_isolated_build():
            self.log.info('Isolated build. Skipping')
            return False

        return True
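A typical consumer of should_run would be an early-exit guard at the top of run(); this is an illustrative pattern only, and push_operator_manifests is a hypothetical helper name:

    def run(self):
        # Illustrative guard: skip all work when any precondition fails.
        if not self.should_run():
            return None
        return self.push_operator_manifests()  # hypothetical helper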
Example #9
    def __init__(self, tasker, workflow, url=None, verify_ssl=True,
                 use_auth=True, blocksize=None,
                 target=None, poll_interval=5):
        """
        constructor

        :param tasker: ContainerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiImportBase, self).__init__(tasker, workflow)

        self.openshift_fallback = {
            'url': url,
            'insecure': not verify_ssl,
            'auth': {'enable': use_auth}
        }

        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.osbs = get_openshift_session(self.workflow, self.openshift_fallback)
        self.build_id = None
        self.koji_task_id = None
        self.session = None
        self.reserve_build = get_koji(self.workflow).get('reserve_build', False)
        self.delegate_enabled = get_koji(self.workflow).get('delegate_task', True)
        self.rebuild = is_rebuild(self.workflow)
    def test_check_is_rebuild(self, caplog, tmpdir, monkeypatch, reactor_config_map,
                              base_from_scratch, custom_base, from_latest):
        key = 'is_autorebuild'
        value = 'true'

        workflow, runner = self.prepare(tmpdir, key, value, reactor_config_map=reactor_config_map,
                                        base_from_scratch=base_from_scratch,
                                        custom_base=custom_base)

        monkeypatch.setenv("BUILD", json.dumps({
            "metadata": {
                "labels": {
                    "buildconfig": "buildconfig1",
                    key: value,
                    'git-branch': 'the-branch',
                }
            }
        }))

        mock_times = 1 if from_latest and not (base_from_scratch or custom_base) else 0
        (flexmock(workflow.source)
            .should_call('reset')
            .times(mock_times)
            .with_args('origin/the-branch'))

        (flexmock(build_orchestrate_build)
            .should_receive('override_build_kwarg')
            .times(mock_times)
            .with_args(workflow, 'git_ref', 'HEAD-OF-origin/the-branch'))

        workflow.source.config.autorebuild = dict(from_latest=from_latest)

        runner.run()
        if base_from_scratch:
            assert workflow.prebuild_results[CheckAndSetRebuildPlugin.key] is False
            assert not is_rebuild(workflow)
            log_msg = "Skipping check and set rebuild: unsupported for FROM-scratch images"
            assert log_msg in caplog.text
        elif custom_base:
            assert workflow.prebuild_results[CheckAndSetRebuildPlugin.key] is False
            assert not is_rebuild(workflow)
            log_msg = "Skipping check and set rebuild: unsupported for custom base images"
            assert log_msg in caplog.text
        else:
            assert workflow.prebuild_results[CheckAndSetRebuildPlugin.key] is True
            assert is_rebuild(workflow)
Example #11
    def run(self):
        if is_scratch_build(self.workflow):
            self.log.info('scratch build, skipping plugin')
            return

        if not (self.workflow.builder.dockerfile_images.base_from_scratch
                or self.workflow.builder.dockerfile_images.custom_base_image):
            self._base_image_nvr = self.detect_parent_image_nvr(
                self.workflow.builder.dockerfile_images.base_image,
                inspect_data=self.workflow.builder.base_image_inspect,
            )
            if is_rebuild(self.workflow):
                self.ignore_isolated_autorebuilds()

        manifest_mismatches = []
        for img, local_tag in self.workflow.builder.dockerfile_images.items():
            if base_image_is_custom(img.to_str()):
                continue

            nvr = self.detect_parent_image_nvr(
                local_tag) if local_tag else None
            self._parent_builds[img] = self.wait_for_parent_image_build(
                nvr) if nvr else None
            if nvr == self._base_image_nvr:
                self._base_image_build = self._parent_builds[img]

            if self._parent_builds[img]:
                # we need the possible floating tag
                check_img = copy(local_tag)
                check_img.tag = img.tag
                try:
                    self.check_manifest_digest(check_img,
                                               self._parent_builds[img])
                except ValueError as exc:
                    manifest_mismatches.append(exc)
            else:
                err_msg = (
                    'Could not get koji build info for parent image {}. '
                    'Was this image built in OSBS?'.format(img.to_str()))
                if get_skip_koji_check_for_base_image(self.workflow,
                                                      fallback=False):
                    self.log.warning(err_msg)
                else:
                    self.log.error(err_msg)
                    raise RuntimeError(err_msg)

        if manifest_mismatches:
            mismatch_msg = (
                'Error while comparing parent images manifest digests in koji with '
                'related values from registries: %s')
            if get_fail_on_digest_mismatch(self.workflow, fallback=True):
                self.log.error(mismatch_msg, manifest_mismatches)
                raise RuntimeError(mismatch_msg % manifest_mismatches)

            self.log.warning(mismatch_msg, manifest_mismatches)
        return self.make_result()
    def run(self):
        if is_rebuild(self.workflow):
            self.log.info('this is an autorebuild, determining whether to skip it')
            if not self._is_rebuild_enabled():
                self.log.info('autorebuild is disabled, %s is interrupting the build', self.key)
                raise AutoRebuildCanceledException(self.key, 'autorebuild is disabled')
            else:
                self.log.info('autorebuild is enabled, %s is doing nothing', self.key)
        else:
            self.log.info('this is not an autorebuild, %s is doing nothing', self.key)
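_is_rebuild_enabled is not shown here. A minimal sketch, assuming the repo configuration exposes the same source.config.autorebuild dict seen in the tests above, with a hypothetical 'enabled' flag:

    def _is_rebuild_enabled(self):
        # Hedged sketch: default to enabled when the flag is absent.
        autorebuild = self.workflow.source.config.autorebuild or {}
        return autorebuild.get('enabled', True)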
Example #13
    def get_build(self, metadata, worker_metadatas):
        start_time = int(atomic_reactor_start_time)

        labels = df_parser(self.workflow.builder.df_path,
                           workflow=self.workflow).labels

        component = get_preferred_label(labels, 'com.redhat.component')
        version = get_preferred_label(labels, 'version')
        release = get_preferred_label(labels, 'release')

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {'image': {'autorebuild': is_rebuild(self.workflow)}}
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = int(koji_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error(
                    "%s: expected filesystem-koji-task-id in result",
                    AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r",
                                   fs_task_id,
                                   exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        self.set_help(extra, worker_metadatas)

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
        }

        return build
    def run(self):
        """
        run the plugin
        """
        if self.delegate_enabled:
            # will be used in koji_import
            self.workflow.triggered_after_koji_task = self.triggered_after_koji_task

        task_running = False
        koji_task_id = self.metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id:
            task_info = self.kojisession.getTaskInfo(koji_task_id,
                                                     request=True)
            if task_info:
                task_running = koji.TASK_STATES[task_info['state']] == 'OPEN'
            else:
                self.log.warning("koji-task-id label is set on the build, "
                                 "but the task doesn't exist in koji")
        else:
            self.log.warning("koji-task-id label doesn't exist on build")

        if not self.delegate_enabled:
            self.log.info("delegate_task not enabled, skipping plugin")
            return
        elif not is_rebuild(self.workflow):
            self.log.info("not autorebuild, skipping plugin")
            return
        elif (self.triggered_after_koji_task and task_running):
            # The buildConfig will already have triggered_after_koji_task in user_params
            # after the first autorebuild performed with the delegating feature enabled.
            # If koji-task-id for the build is a running task,
            # it means it is a new, already delegated task
            self.log.info("koji task already delegated, skipping plugin")
            return

        self.osbs = get_openshift_session(self.workflow, NO_FALLBACK)

        # Do not run exit plugins, especially sendmail
        self.workflow.exit_plugins_conf = []

        if self.workflow.cancel_isolated_autorebuild:  # this is set by the koji_parent plugin
            self.log.info(
                "ignoring isolated build for autorebuild, the build will be cancelled"
            )
            self.cancel_build()
            raise BuildCanceledException("Build was canceled")

        self.delegate_task()

        # We cancel the build so it does not interfere with real failed builds
        self.cancel_build()
        self.log.info('Build was delegated, the build will be cancelled')
        raise BuildCanceledException("Build was canceled")
    def run(self):
        """
        run the plugin
        """
        if self.delegate_enabled:
            # will be used in koji_import
            self.workflow.triggered_after_koji_task = self.triggered_after_koji_task

        task_running = False
        koji_task_id = self.metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id:
            task_info = self.kojisession.getTaskInfo(koji_task_id,
                                                     request=True)
            if task_info:
                task_running = koji.TASK_STATES[task_info['state']] == 'OPEN'
            else:
                self.log.warning("koji-task-id label is set on the build, "
                                 "but the task doesn't exist in koji")
        else:
            self.log.warning("koji-task-id label doesn't exist on build")

        # We don't want the plugin to continue when:
        # - delegate_task isn't enabled
        # - the build isn't an autorebuild
        # - triggered_after_koji_task was provided but the task is still
        #   running; once delegating is enabled, after the first autorebuild
        #   the buildConfig will already have triggered_after_koji_task in
        #   user_params, so a running koji-task-id means this is the new,
        #   already delegated task
        if not self.delegate_enabled:
            self.log.info("delegate_task not enabled, skipping plugin")
            return
        elif not is_rebuild(self.workflow):
            self.log.info("not autorebuild, skipping plugin")
            return
        elif (self.triggered_after_koji_task and task_running):
            self.log.info("koji task already delegated, skipping plugin")
            return

        self.osbs = get_openshift_session(self.workflow, NO_FALLBACK)

        self.delegate_task()

        # Remove all exit plugins; we don't want any of them running,
        # especially sendmail.
        self.workflow.exit_plugins_conf = []
        # Cancel the build and raise an exception. Without cancelling, the
        # build would end up as a failed build; we want it recorded as
        # cancelled instead so it doesn't interfere with real failed builds.
        self.cancel_build()
        self.log.info('Build was delegated, will cancel itself')
        raise BuildCanceledException("Build was canceled")
    def get_worker_build_kwargs(self, release, platform, koji_upload_dir,
                                task_id):
        build_kwargs = deepcopy(self.build_kwargs)

        build_kwargs.pop('architecture', None)

        build_kwargs['release'] = release
        build_kwargs['platform'] = platform
        build_kwargs['koji_upload_dir'] = koji_upload_dir
        build_kwargs['is_auto'] = is_rebuild(self.workflow)
        if task_id:
            build_kwargs['filesystem_koji_task_id'] = task_id

        return build_kwargs
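How these kwargs are consumed is outside this snippet; an illustrative call shape, with osbs.create_worker_build assumed to be the osbs-client entry point the orchestrator hands them to:

    kwargs = self.get_worker_build_kwargs(release='1', platform='x86_64',
                                          koji_upload_dir='koji-upload',
                                          task_id=12345)
    osbs.create_worker_build(**kwargs)  # assumed osbs-client API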
    def run(self):
        if is_rebuild(self.workflow):
            self.log.info(
                'this is an autorebuild, determining whether to skip it')
            if not self._is_rebuild_enabled():
                self.log.info(
                    'autorebuild is disabled, %s is interrupting the build',
                    self.key)
                raise AutoRebuildCanceledException(self.key,
                                                   'autorebuild is disabled')
            else:
                self.log.info('autorebuild is enabled, %s is doing nothing',
                              self.key)
        else:
            self.log.info('this is not an autorebuild, %s is doing nothing',
                          self.key)
    def test_check_is_rebuild(self, monkeypatch):
        key = 'is_autorebuild'
        value = 'true'
        workflow, runner = self.prepare(key, value)

        monkeypatch.setenv("BUILD", json.dumps({
            "metadata": {
                "labels": {
                    "buildconfig": "buildconfig1",
                    key: value,
                }
            }
        }))
        runner.run()
        assert workflow.prebuild_results[CheckAndSetRebuildPlugin.key] is True
        assert is_rebuild(workflow)
    def test_check_is_rebuild(self, monkeypatch):
        key = 'is_autorebuild'
        value = 'true'
        workflow, runner = self.prepare(key, value)

        monkeypatch.setenv("BUILD", json.dumps({
            "metadata": {
                "labels": {
                    "buildconfig": "buildconfig1",
                    key: value,
                }
            }
        }))
        runner.run()
        assert workflow.prebuild_results[CheckAndSetRebuildPlugin.key] is True
        assert is_rebuild(workflow)
def test_check_is_rebuild():
    key = 'is_autorebuild'
    value = 'true'
    workflow, runner = prepare(key, value)

    os.environ["BUILD"] = json.dumps({
        "metadata": {
            "labels": {
                "buildconfig": "buildconfig1",
                key: value,
            }
        }
    })
    runner.run()
    assert workflow.prebuild_results[CheckAndSetRebuildPlugin.key] is True
    assert is_rebuild(workflow)
Example #22
    def adjust_for_autorebuild(self):
        """Ignore pre-filled signing_intent and compose_ids for autorebuids

        Auto rebuilds are expected to use a known configuration. The parameters
        signing_intent and compose_ids are meant for one-off build calls. This
        method ensure that these parameters are ignored for autorebuilds.
        """
        if not is_rebuild(self.workflow):
            return

        if self.signing_intent:
            self.log.info('Autorebuild detected: Ignoring signing_intent plugin parameter')
            self.signing_intent = None

        if self.compose_ids:
            self.log.info('Autorebuild detected: Ignoring compose_ids plugin parameter')
            self.compose_ids = tuple()
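To make the effect concrete, an illustrative before/after on a hypothetical plugin instance (not taken from the test suite):

    # One-off parameters survive a manual build...
    plugin.signing_intent = 'release'
    plugin.compose_ids = (42,)
    plugin.adjust_for_autorebuild()
    # ...but are cleared when is_rebuild(workflow) is True:
    assert plugin.signing_intent is None
    assert plugin.compose_ids == ()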
    def run(self):
        """
        run the plugin
        """

        if self.workflow.build_process_failed:
            self.log.info("Build already failed, not incrementing release")
            return

        # Ensure we can use the git repository already checked out for us
        source = self.workflow.source
        assert isinstance(source, GitSource)
        repo = GitRepo(source.get(), self.log)

        # Note: when this plugin is configured by osbs-client,
        # source.git_commit (the Build's source git ref) comes from
        # --git-branch not --git-commit. The value from --git-commit
        # went into our self.git_ref.
        branch = source.git_commit
        try:
            branch_sha = repo.git(['rev-parse', branch])
        except subprocess.CalledProcessError:
            self.log.error("Branch '%s' not found in git repo",
                           source.git_commit)
            raise RuntimeError("Branch '%s' not found" % branch)

        # We checked out the right branch
        assert repo.git(['rev-parse', 'HEAD']) == branch_sha

        # We haven't reset it to an earlier commit
        remote = repo.git(['config', '--get',
                           'branch.{branch}.remote'.format(branch=branch)])
        upstream = '{remote}/{branch}'.format(remote=remote, branch=branch)
        upstream_sha = repo.git(['rev-parse', upstream])
        assert branch_sha == upstream_sha

        if is_rebuild(self.workflow):
            self.log.info("Incrementing release label")
            self.bump(repo, remote)
        else:
            self.log.info("Verifying branch is at specified commit")
            self.verify_branch(branch, branch_sha)
    def run(self):
        """
        run the plugin
        """

        if self.workflow.build_process_failed:
            self.log.info("Build already failed, not incrementing release")
            return

        # Ensure we can use the git repository already checked out for us
        source = self.workflow.source
        assert isinstance(source, GitSource)
        with GitRepo(source.get(), self.log) as repo:
            # Note: when this plugin is configured by osbs-client,
            # source.git_commit (the Build's source git ref) comes from
            # --git-branch not --git-commit. The value from --git-commit
            # went into our self.git_ref.
            branch = source.git_commit
            try:
                branch_sha = repo.git(['rev-parse', branch])
            except subprocess.CalledProcessError:
                self.log.error("Branch '%s' not found in git repo",
                               source.git_commit)
                raise RuntimeError("Branch '%s' not found" % branch)

            # We checked out the right branch
            assert repo.git(['rev-parse', 'HEAD']) == branch_sha

            # We haven't reset it to an earlier commit
            remote = repo.git(['config', '--get',
                               'branch.{branch}.remote'.format(branch=branch)])
            upstream = '{remote}/{branch}'.format(remote=remote, branch=branch)
            upstream_sha = repo.git(['rev-parse', upstream])
            assert branch_sha == upstream_sha

            if is_rebuild(self.workflow):
                self.log.info("Incrementing release label")
                self.bump(repo, remote)
            else:
                self.log.info("Verifying branch is at specified commit")
                self.verify_branch(branch, branch_sha)
    def get_worker_build_kwargs(self, release, platform, koji_upload_dir,
                                task_id, worker_openshift):
        build_kwargs = deepcopy(self.build_kwargs)

        build_kwargs.pop('architecture', None)

        build_kwargs['release'] = release
        build_kwargs['platform'] = platform
        build_kwargs['koji_upload_dir'] = koji_upload_dir
        build_kwargs['is_auto'] = is_rebuild(self.workflow)
        if task_id:
            build_kwargs['filesystem_koji_task_id'] = task_id

        if not self.reactor_config.is_default():
            worker_reactor_conf = deepcopy(self.reactor_config.conf)
            worker_reactor_conf['openshift'] = worker_openshift
            worker_reactor_conf.pop('worker_token_secrets', None)
            self._update_content_versions(worker_reactor_conf)

            build_kwargs['reactor_config_override'] = worker_reactor_conf

        return build_kwargs
    def get_worker_build_kwargs(self, release, platform, koji_upload_dir,
                                task_id, worker_openshift):
        build_kwargs = deepcopy(self.build_kwargs)

        build_kwargs.pop('architecture', None)

        build_kwargs['release'] = release
        build_kwargs['platform'] = platform
        build_kwargs['koji_upload_dir'] = koji_upload_dir
        build_kwargs['is_auto'] = is_rebuild(self.workflow)
        if task_id:
            build_kwargs['filesystem_koji_task_id'] = task_id

        if not self.reactor_config.is_default():
            worker_reactor_conf = deepcopy(self.reactor_config.conf)
            worker_reactor_conf['openshift'] = worker_openshift
            worker_reactor_conf.pop('worker_token_secrets', None)
            self._update_content_versions(worker_reactor_conf, platform)

            build_kwargs['reactor_config_override'] = worker_reactor_conf

        return build_kwargs
    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab) or
                (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        if not is_rebuild(self.workflow):
            self.log.info("Not promoting to koji: not a rebuild")
            return

        koji_metadata, output_files = self.get_metadata()

        try:
            session = self.login()
            server_dir = self.get_upload_server_dir()
            for output in output_files:
                if output.file:
                    self.upload_file(session, output, server_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        session.CGImport(koji_metadata, server_dir)

        self.log.debug("Submitted with metadata: %s",
                       json.dumps(koji_metadata, sort_keys=True, indent=4))
Example #28
    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab)
                or (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        if not is_rebuild(self.workflow):
            self.log.info("Not promoting to koji: not a rebuild")
            return

        koji_metadata, output_files = self.get_metadata()

        try:
            session = self.login()
            server_dir = self.get_upload_server_dir()
            for output in output_files:
                if output.file:
                    self.upload_file(session, output, server_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        session.CGImport(koji_metadata, server_dir)

        self.log.debug("Submitted with metadata: %s",
                       json.dumps(koji_metadata, sort_keys=True, indent=4))
    def get_build(self, metadata):
        start_time = int(atomic_reactor_start_time)

        labels = df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels

        component = get_preferred_label(labels, 'com.redhat.component')
        version = get_preferred_label(labels, 'version')
        release = get_preferred_label(labels, 'release')

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {'image': {'autorebuild': is_rebuild(self.workflow)}}
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = int(koji_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if isinstance(help_result, dict) and 'help_file' in help_result and 'status' in help_result:
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                extra['image']['help'] = None
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                extra['image']['help'] = help_result['help_file']
            else:
                self.log.error("Unknown result from add_help plugin: %s", help_result)

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build
    def get_build(self, metadata):
        start_time = int(atomic_reactor_start_time)

        labels = Labels(
            df_parser(self.workflow.builder.df_path,
                      workflow=self.workflow).labels)

        _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
        _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {
            'image': {
                'autorebuild': is_rebuild(self.workflow)
            },
            'submitter': self.koji_session.getLoggedInUser().get('name'),
        }

        koji_task_owner = None
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = koji_task_id = int(
                    koji_task_id)
                koji_task_owner = get_koji_task_owner(self.koji_session,
                                                      koji_task_id).get('name')
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error(
                    "%s: expected filesystem-koji-task-id in result",
                    AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r",
                                   fs_task_id,
                                   exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        # Append media_types from pulp pull
        pulp_pull_results = self.workflow.postbuild_results.get(
            PLUGIN_PULP_PULL_KEY)
        if pulp_pull_results:
            extra['image']['media_types'] = sorted(list(
                set(pulp_pull_results)))

        # append parent builds and parent_build_id from koji parent
        extra['image'].update(get_parent_image_koji_data(self.workflow))

        # Append isolated build flag
        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if isinstance(
                help_result, dict
        ) and 'help_file' in help_result and 'status' in help_result:
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                extra['image']['help'] = None
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                extra['image']['help'] = help_result['help_file']
            else:
                self.log.error("Unknown result from add_help plugin: %s",
                               help_result)

        flatpak_source_info = get_flatpak_source_info(self.workflow)
        if flatpak_source_info is not None:
            compose_info = get_compose_info(self.workflow)
            koji_metadata = compose_info.koji_metadata()
            koji_metadata['flatpak'] = True
            extra['image'].update(koji_metadata)

        resolve_comp_result = self.workflow.prebuild_results.get(
            PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids':
                [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent':
                resolve_comp_result['signing_intent'],
                'signing_intent_overridden':
                resolve_comp_result['signing_intent_overridden'],
            }

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
            'owner': koji_task_owner,
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build
Example #31
    def run(self):
        """
        run the plugin
        """
        # source container build
        if PLUGIN_FETCH_SOURCES_KEY in self.workflow.prebuild_results:
            source_nvr = self.get_source_build_nvr(
                scratch=is_scratch_build(self.workflow))
            self.log.info("Setting source_build_nvr: %s", source_nvr)
            self.workflow.koji_source_nvr = source_nvr

            if self.reserve_build and not is_scratch_build(self.workflow):
                self.reserve_build_in_koji(source_nvr['name'],
                                           source_nvr['version'],
                                           source_nvr['release'],
                                           None,
                                           None,
                                           source_build=True)

            return

        parser = df_parser(self.workflow.builder.df_path,
                           workflow=self.workflow)
        dockerfile_labels = parser.labels
        labels = Labels(dockerfile_labels)
        missing_labels = {}
        missing_value = 'missing'
        empty_value = 'empty'
        add_timestamp_to_release = (
            self.workflow.source.config.autorebuild.get(
                'add_timestamp_to_release', False)
            and is_rebuild(self.workflow))
        timestamp = utcnow().strftime('%Y%m%d%H%M%S')

        component_label = labels.get_name(Labels.LABEL_TYPE_COMPONENT)
        try:
            component = dockerfile_labels[component_label]
        except KeyError:
            self.log.error("%s label: %s", missing_value, component_label)
            missing_labels[component_label] = missing_value

        version_label = labels.get_name(Labels.LABEL_TYPE_VERSION)
        try:
            version = dockerfile_labels[version_label]
            if not version:
                self.log.error('%s label: %s', empty_value, version_label)
                missing_labels[version_label] = empty_value
        except KeyError:
            self.log.error('%s label: %s', missing_value, version_label)
            missing_labels[version_label] = missing_value

        try:
            release_label, release = labels.get_name_and_value(
                Labels.LABEL_TYPE_RELEASE)
        except KeyError:
            release = None
        else:
            if not release:
                self.log.error('%s label: %s', empty_value, release_label)
                missing_labels[release_label] = empty_value

        if missing_labels:
            raise RuntimeError(
                'Required labels are missing, empty, or use'
                ' undefined variables: {}'.format(missing_labels))

        # Always set preferred release label - other will be set if old-style
        # label is present
        release_label = labels.LABEL_NAMES[Labels.LABEL_TYPE_RELEASE][0]

        # Reserve build for isolated builds as well (or any build with supplied release)
        user_provided_release = self.workflow.user_params.get('release')
        if user_provided_release:
            if is_scratch_build(self.workflow):
                return

            self.check_build_existence_for_explicit_release(
                component, version, user_provided_release)

            if self.reserve_build:
                self.reserve_build_in_koji(component,
                                           version,
                                           user_provided_release,
                                           release_label,
                                           dockerfile_labels,
                                           user_provided_release=True)
            return

        if release:
            if not self.append:
                if add_timestamp_to_release:
                    release = '%s.%s' % (release, timestamp)
                    self.log.debug(
                        "autorebuild with add_timestamp_to_release and "
                        "release set explicitly, appending timestamp: %s",
                        timestamp)
                else:
                    self.log.debug(
                        "release set explicitly so not incrementing")

                if not is_scratch_build(self.workflow):
                    self.check_build_existence_for_explicit_release(
                        component, version, release)
                    dockerfile_labels[release_label] = release
                else:
                    return

        if not release or self.append:
            self.next_release_general(component, version, release,
                                      release_label, dockerfile_labels)

        if self.reserve_build and not is_scratch_build(self.workflow):
            self.reserve_build_in_koji(component, version, release,
                                       release_label, dockerfile_labels)
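For clarity, the add_timestamp_to_release branch above produces release values like this (illustrative values, reproduced standalone):

from datetime import datetime, timezone

# Illustrative only: a fixed moment in time instead of utcnow().
now = datetime(2021, 6, 1, 12, 0, 0, tzinfo=timezone.utc)
timestamp = now.strftime('%Y%m%d%H%M%S')  # '20210601120000'
release = '%s.%s' % ('3', timestamp)      # '3.20210601120000'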
Example #32
    def get_build(self, metadata, worker_metadatas):
        start_time = int(atomic_reactor_start_time)
        extra = {'image': {}}

        if not self.source_build:
            labels = Labels(df_parser(self.workflow.builder.df_path,
                                      workflow=self.workflow).labels)
            _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
            _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
            _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

            source = self.workflow.source
            if not isinstance(source, GitSource):
                raise RuntimeError('git source required')

            extra['image']['autorebuild'] = is_rebuild(self.workflow)
            if self.workflow.triggered_after_koji_task:
                extra['image']['triggered_after_koji_task'] =\
                    self.workflow.triggered_after_koji_task

            try:
                isolated = str(metadata['labels']['isolated']).lower() == 'true'
            except (IndexError, AttributeError, KeyError):
                isolated = False
            self.log.info("build is isolated: %r", isolated)
            extra['image']['isolated'] = isolated

            fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
            if fs_result is not None:
                try:
                    fs_task_id = fs_result['filesystem-koji-task-id']
                except KeyError:
                    self.log.error("%s: expected filesystem-koji-task-id in result",
                                   AddFilesystemPlugin.key)
                else:
                    try:
                        task_id = int(fs_task_id)
                    except ValueError:
                        self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                    else:
                        extra['filesystem_koji_task_id'] = task_id

            extra['image'].update(get_parent_image_koji_data(self.workflow))

            flatpak_source_info = get_flatpak_source_info(self.workflow)
            if flatpak_source_info is not None:
                compose_info = get_compose_info(self.workflow)
                koji_metadata = compose_info.koji_metadata()
                koji_metadata['flatpak'] = True
                extra['image'].update(koji_metadata)

            resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
            if resolve_comp_result:
                extra['image']['odcs'] = {
                    'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
                    'signing_intent': resolve_comp_result['signing_intent'],
                    'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
                }
            if self.workflow.all_yum_repourls:
                extra['image']['yum_repourls'] = self.workflow.all_yum_repourls

            self.set_help(extra, worker_metadatas)
            self.set_operators_metadata(extra, worker_metadatas)
            self.set_go_metadata(extra)
            self.set_group_manifest_info(extra, worker_metadatas)
        else:
            source_result = self.workflow.prebuild_results[PLUGIN_FETCH_SOURCES_KEY]
            extra['image']['sources_for_nvr'] = source_result['sources_for_nvr']
            extra['image']['sources_signing_intent'] = source_result['signing_intent']

        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = int(koji_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        koji_task_owner = get_koji_task_owner(self.session, koji_task_id).get('name')
        extra['submitter'] = self.session.getLoggedInUser()['name']

        self.set_media_types(extra, worker_metadatas)

        build = {
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
            'owner': koji_task_owner,
        }
        if self.source_build:
            build.update({
                'name': self.workflow.koji_source_nvr['name'],
                'version': self.workflow.koji_source_nvr['version'],
                'release': self.workflow.koji_source_nvr['release'],
                'source': self.workflow.koji_source_source_url,
            })
        else:
            build.update({
                'name': component,
                'version': version,
                'release': release,
                'source': "{0}#{1}".format(source.uri, source.commit_id),
            })

        return build
    def get_build(self, metadata, worker_metadatas):
        start_time = int(atomic_reactor_start_time)

        labels = Labels(
            df_parser(self.workflow.builder.df_path,
                      workflow=self.workflow).labels)
        _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
        _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {'image': {'autorebuild': is_rebuild(self.workflow)}}
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = int(koji_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error(
                    "%s: expected filesystem-koji-task-id in result",
                    AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r",
                                   fs_task_id,
                                   exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        parent_id = self.get_parent_image_koji_build_id()
        if parent_id is not None:
            try:
                parent_id = int(parent_id)
            except ValueError:
                self.log.exception("invalid koji parent id %r", parent_id)
            else:
                extra.setdefault('image', {})
                extra['image']['parent_build_id'] = parent_id

        flatpak_source_info = get_flatpak_source_info(self.workflow)
        if flatpak_source_info is not None:
            extra['image'].update(flatpak_source_info.koji_metadata())

        koji_task_owner = get_koji_task_owner(self.session,
                                              koji_task_id).get('name')
        extra['submitter'] = self.session.getLoggedInUser()['name']

        resolve_comp_result = self.workflow.prebuild_results.get(
            PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids':
                [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent':
                resolve_comp_result['signing_intent'],
                'signing_intent_overridden':
                resolve_comp_result['signing_intent_overridden'],
            }

        self.set_help(extra, worker_metadatas)
        self.set_media_types(extra, worker_metadatas)
        self.remove_unavailable_manifest_digests(worker_metadatas)
        self.set_group_manifest_info(extra, worker_metadatas)

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
            'owner': koji_task_owner,
        }

        return build
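
    # --- Hedged usage sketch (not from the original source) ---
    # Illustrates the minimal 'metadata' shape the get_build() above consumes:
    # only the 'labels' mapping is read, and both keys shown are optional.
    # The method name and label values are hypothetical.
    def _example_get_build_usage(self, worker_metadatas):
        metadata = {
            'labels': {
                'koji-task-id': '12345',  # hypothetical task ID
                'isolated': 'false',
            }
        }
        # Returns the Koji build map: name/version/release, the "uri#commit"
        # source string, timestamps, owner and the assembled 'extra' metadata.
        return self.get_build(metadata, worker_metadatas)
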
    def get_build(self, metadata):
        start_time = int(atomic_reactor_start_time)

        labels = Labels(df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels)

        _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
        _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {
            'image': {'autorebuild': is_rebuild(self.workflow)},
            'submitter': self.koji_session.getLoggedInUser().get('name'),
        }

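        # Resolve the Koji task that created this build configuration, if any,
        # and remember its owner for the 'owner' field of the build map.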
        koji_task_owner = None
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = koji_task_id = int(koji_task_id)
                koji_task_owner = get_koji_task_owner(self.koji_session, koji_task_id).get('name')
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        # Append media_types from pulp pull
        pulp_pull_results = self.workflow.postbuild_results.get(PLUGIN_PULP_PULL_KEY)
        if pulp_pull_results:
            extra['image']['media_types'] = sorted(list(set(pulp_pull_results)))

        # Append parent builds and parent_build_id from the Koji parent plugin
        extra['image'].update(get_parent_image_koji_data(self.workflow))

        # Append isolated build flag
        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated

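        # Record how the add_help plugin resolved the image's help file.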
        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if (isinstance(help_result, dict) and
                'help_file' in help_result and 'status' in help_result):
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                extra['image']['help'] = None
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                extra['image']['help'] = help_result['help_file']
            else:
                self.log.error("Unknown result from add_help plugin: %s", help_result)

        flatpak_source_info = get_flatpak_source_info(self.workflow)
        if flatpak_source_info is not None:
            compose_info = get_compose_info(self.workflow)
            koji_metadata = compose_info.koji_metadata()
            koji_metadata['flatpak'] = True
            extra['image'].update(koji_metadata)

        resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent': resolve_comp_result['signing_intent'],
                'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
            }

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
            'owner': koji_task_owner,
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build
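
    # --- Hedged sketch (not in the original source) ---
    # The isolated-flag parsing used by these get_build() variants compares
    # the 'isolated' label case-insensitively against 'true' and treats any
    # missing or malformed metadata as a plain build. A standalone equivalent
    # (the method name is hypothetical):
    @staticmethod
    def _example_is_isolated(metadata):
        try:
            return str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            return False
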
    def get_build(self, metadata, worker_metadatas):
        start_time = int(atomic_reactor_start_time)

        labels = df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels

        component = get_preferred_label(labels, 'com.redhat.component')
        version = get_preferred_label(labels, 'version')
        release = get_preferred_label(labels, 'release')

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {'image': {'autorebuild': is_rebuild(self.workflow)}}
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = int(koji_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        parent_id = self.get_parent_image_koji_build_id()
        if parent_id is not None:
            try:
                parent_id = int(parent_id)
            except ValueError:
                self.log.exception("invalid koji parent id %r", parent_id)
            else:
                extra.setdefault('image', {})
                extra['image']['parent_build_id'] = parent_id

        flatpak_source_info = get_flatpak_source_info(self.workflow)
        if flatpak_source_info is not None:
            extra['image'].update(flatpak_source_info.koji_metadata())

        if koji_task_id:
            # get_koji_task_owner() may return its default (None here) for an
            # unknown task, so guard the mapping instead of indexing ['name']
            # directly, which would raise on None.
            owner = get_koji_task_owner(self.session, koji_task_id, default=None)
            koji_task_owner = (owner or {}).get('name')
        else:
            koji_task_owner = None
        extra['submitter'] = self.session.getLoggedInUser()['name']

        resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent': resolve_comp_result['signing_intent'],
                'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
            }

        self.set_help(extra, worker_metadatas)
        self.set_media_types(extra, worker_metadatas)
        self.remove_unavailable_manifest_digests(worker_metadatas)
        self.set_group_manifest_info(extra, worker_metadatas)

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
            'owner': koji_task_owner,
        }

        return build
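
    # --- Hedged sketch (not in the original source) ---
    # The ODCS block that the get_build() variants above attach under
    # extra['image']['odcs'], pulled out as a helper for illustration;
    # 'resolve_comp_result' is the resolve_composes plugin result and the
    # method name is hypothetical.
    @staticmethod
    def _example_odcs_extra(resolve_comp_result):
        return {
            'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
            'signing_intent': resolve_comp_result['signing_intent'],
            'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
        }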