def run(self):
        """
        run the plugin
        """
        if self.workflow.builder.base_from_scratch:
            self.log.info(
                "Skipping check and set rebuild: unsupported for FROM-scratch images"
            )
            return False

        metadata = get_build_json().get("metadata", {})
        self.build_labels = metadata.get("labels", {})
        buildconfig = self.build_labels["buildconfig"]
        is_rebuild = self.build_labels.get(self.label_key) == self.label_value
        self.log.info("This is a rebuild? %s", is_rebuild)

        if not is_rebuild:
            # Update the BuildConfig metadata so the next Build
            # instantiated from it is detected as being an automated
            # rebuild
            osbs = get_openshift_session(self.workflow,
                                         self.openshift_fallback)
            new_labels = {self.label_key: self.label_value}
            osbs.update_labels_on_build_config(buildconfig, new_labels)
        else:
            self.pull_latest_commit_if_configured()

        return is_rebuild
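
The autorebuild marker above is a simple label round-trip: the first (manual) build finds no marker label, writes it onto the BuildConfig, and every Build later instantiated from that BuildConfig inherits the label, so the same comparison evaluates true. A minimal sketch of that flow with a stubbed client (the label key/value and the client class are illustrative, not the plugin's real configuration):

# Minimal sketch of the autorebuild label round-trip; the client object,
# label key, and label value are illustrative stand-ins, not the plugin API.
LABEL_KEY = "is_autorebuild"      # assumed label key
LABEL_VALUE = "true"              # assumed label value

class FakeOsbs:
    """Stand-in for the osbs client: stores BuildConfig labels in memory."""
    def __init__(self):
        self.buildconfig_labels = {}

    def update_labels_on_build_config(self, buildconfig, labels):
        self.buildconfig_labels.setdefault(buildconfig, {}).update(labels)

osbs = FakeOsbs()

# First (manual) build: the marker label is missing, so is_rebuild is False...
build_labels = {"buildconfig": "my-bc"}
assert build_labels.get(LABEL_KEY) != LABEL_VALUE
# ...and the plugin marks the BuildConfig for next time.
osbs.update_labels_on_build_config("my-bc", {LABEL_KEY: LABEL_VALUE})

# A Build instantiated from the BuildConfig inherits its labels,
# so the same check now detects an automated rebuild.
rebuild_labels = dict(osbs.buildconfig_labels["my-bc"], buildconfig="my-bc")
assert rebuild_labels.get(LABEL_KEY) == LABEL_VALUE
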
    def run(self):
        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not importing failed build")
            return

        if is_scratch_build(self.workflow):
            self.log.info('scratch build, skipping plugin')
            return

        if not self.imagestream_name:
            self.log.info('no imagestream provided, skipping plugin')
            return

        self.floating_images = get_floating_images(self.workflow)
        if not self.floating_images:
            self.log.info('No floating tags to import, skipping import_image')
            return

        self.resolve_docker_image_repo()

        self.osbs = get_openshift_session(self.workflow, self.openshift_fallback)
        self.get_or_create_imagestream()

        self.osbs.import_image_tags(self.imagestream_name, self.get_trackable_tags(),
                                    self.docker_image_repo, insecure=self.insecure_registry)
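
For reference, the call shape of import_image_tags used above, exercised against a stub client; the ImageStream name, tags, and repository are hypothetical examples:

# Call-shape sketch for import_image_tags; the client is a stub and the
# ImageStream name, tags, and repository below are hypothetical examples.
class StubOsbs:
    def import_image_tags(self, imagestream, tags, repository, insecure=False):
        print("importing %s from %s into ImageStream %s (insecure=%s)"
              % (sorted(tags), repository, imagestream, insecure))

StubOsbs().import_image_tags(
    "myapp",                            # ImageStream name
    {"latest", "1.0"},                  # trackable (floating) tags
    "registry.example.com/ns/myapp",    # resolved docker image repo
    insecure=False,
)
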
Example #3
    def __init__(self, tasker, workflow, url=None, verify_ssl=True,
                 use_auth=True, blocksize=None,
                 target=None, poll_interval=5):
        """
        constructor

        :param tasker: ContainerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiImportBase, self).__init__(tasker, workflow)

        self.openshift_fallback = {
            'url': url,
            'insecure': not verify_ssl,
            'auth': {'enable': use_auth}
        }

        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.osbs = get_openshift_session(self.workflow, self.openshift_fallback)
        self.build_id = None
        self.koji_task_id = None
        self.session = None
        self.reserve_build = get_koji(self.workflow).get('reserve_build', False)
        self.delegate_enabled = get_koji(self.workflow).get('delegate_task', True)
        self.rebuild = is_rebuild(self.workflow)
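
The openshift_fallback dict mirrors the 'openshift' section of the reactor config; get_openshift_session only consults it when no reactor config is available. A small sketch of the argument-to-key mapping (note that verify_ssl is inverted into 'insecure'):

# Sketch: how the constructor args map onto the openshift fallback keys.
# get_openshift_session() consults this map only when the reactor config
# does not provide an 'openshift' section.
def make_openshift_fallback(url=None, verify_ssl=True, use_auth=True):
    return {
        'url': url,                   # OSv3 instance URL
        'insecure': not verify_ssl,   # note the inversion
        'auth': {'enable': use_auth},
    }

assert make_openshift_fallback('https://api.example:8443', verify_ssl=False) == {
    'url': 'https://api.example:8443',
    'insecure': True,
    'auth': {'enable': True},
}
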
    def run(self):
        """
        run the plugin
        """
        if self.workflow.builder.base_from_scratch:
            self.log.info("Skipping check and set rebuild: unsupported for FROM-scratch images")
            return False
        if self.workflow.builder.custom_base_image:
            self.log.info("Skipping check and set rebuild: unsupported for custom base images")
            return False

        metadata = get_build_json().get("metadata", {})
        self.build_labels = metadata.get("labels", {})
        buildconfig = self.build_labels["buildconfig"]
        is_rebuild = self.build_labels.get(self.label_key) == self.label_value
        self.log.info("This is a rebuild? %s", is_rebuild)

        if not is_rebuild:
            # Update the BuildConfig metadata so the next Build
            # instantiated from it is detected as being an automated
            # rebuild
            osbs = get_openshift_session(self.workflow, self.openshift_fallback)
            new_labels = {self.label_key: self.label_value}
            osbs.update_labels_on_build_config(buildconfig, new_labels)
        else:
            self.pull_latest_commit_if_configured()

        return is_rebuild
    def __init__(self,
                 tasker,
                 workflow,
                 kojihub=None,
                 url=None,
                 verify_ssl=True,
                 use_auth=True,
                 koji_ssl_certs=None,
                 koji_proxy_user=None,
                 koji_principal=None,
                 koji_keytab=None,
                 blocksize=None,
                 target=None,
                 poll_interval=5):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiImportPlugin, self).__init__(tasker, workflow)

        self.koji_fallback = {
            'hub_url': kojihub,
            'auth': {
                'proxyuser': koji_proxy_user,
                'ssl_certs_dir': koji_ssl_certs,
                'krb_principal': str(koji_principal),
                'krb_keytab_path': str(koji_keytab)
            }
        }

        self.openshift_fallback = {
            'url': url,
            'insecure': not verify_ssl,
            'auth': {
                'enable': use_auth
            }
        }

        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.osbs = get_openshift_session(self.workflow,
                                          self.openshift_fallback)
        self.build_id = None
    def __init__(self, tasker, workflow, koji_upload_dir, kojihub=None, url=None,
                 build_json_dir=None, verify_ssl=True, use_auth=True,
                 koji_ssl_certs_dir=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 blocksize=None, prefer_schema1_digest=True,
                 platform='x86_64', report_multiple_digests=False):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param build_json_dir: str, path to directory with input json
        :param koji_upload_dir: str, path to use when uploading to hub
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs_dir: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param blocksize: int, blocksize to use for uploading files
        :param prefer_schema1_digest: bool, when True, v2 schema 1 digest will
            be preferred as the built image digest
        :param platform: str, platform name for this build
        :param report_multiple_digests: bool, whether to report both schema 1
            and schema 2 digests; if truthy, prefer_schema1_digest is ignored
        """
        super(KojiUploadPlugin, self).__init__(tasker, workflow)

        self.koji_fallback = {
            'hub_url': kojihub,
            'auth': {
                'proxyuser': koji_proxy_user,
                'ssl_certs_dir': koji_ssl_certs_dir,
                'krb_principal': str(koji_principal),
                'krb_keytab_path': str(koji_keytab)
            }
        }

        self.openshift_fallback = {
            'url': url,
            'insecure': not verify_ssl,
            'auth': {'enable': use_auth},
            'build_json_dir': build_json_dir
        }

        self.blocksize = blocksize
        self.koji_upload_dir = koji_upload_dir
        self.prefer_schema1_digest = get_prefer_schema1_digest(self.workflow, prefer_schema1_digest)
        self.report_multiple_digests = report_multiple_digests

        self.osbs = get_openshift_session(self.workflow, self.openshift_fallback)
        self.build_id = None
        self.pullspec_image = None
        self.platform = platform
    def run(self):
        """
        run the plugin
        """
        if self.delegate_enabled:
            # will be used in koji_import
            self.workflow.triggered_after_koji_task = self.triggered_after_koji_task

        task_running = False
        koji_task_id = self.metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id:
            task_info = self.kojisession.getTaskInfo(koji_task_id,
                                                     request=True)
            if task_info:
                task_running = koji.TASK_STATES[task_info['state']] == 'OPEN'
            else:
                self.log.warning(
                    "koji-task-id label exists on build, but the task doesn't exist in koji")
        else:
            self.log.warning("koji-task-id label doesn't exist on build")

        if not self.delegate_enabled:
            self.log.info("delegate_task not enabled, skipping plugin")
            return
        elif not is_rebuild(self.workflow):
            self.log.info("not autorebuild, skipping plugin")
            return
        elif (self.triggered_after_koji_task and task_running):
            # The buildConfig will already have triggered_after_koji_task in user_params
            # after the first autorebuild performed with the delegating feature enabled.
            # If koji-task-id for the build is a running task,
            # it means it is a new, already delegated task
            self.log.info("koji task already delegated, skipping plugin")
            return

        self.osbs = get_openshift_session(self.workflow, NO_FALLBACK)

        # Do not run exit plugins. Especially sendmail
        self.workflow.exit_plugins_conf = []

        if self.workflow.cancel_isolated_autorebuild:  # this is set by the koji_parent plugin
            self.log.info(
                "ignoring isolated build for autorebuild, the build will be cancelled"
            )
            self.cancel_build()
            raise BuildCanceledException("Build was canceled")

        self.delegate_task()

        # We cancel the build so it does not interfere with real failed builds
        self.cancel_build()
        self.log.info('Build was delegated, the build will be cancelled')
        raise BuildCanceledException("Build was canceled")
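
The task-state check above relies on koji.TASK_STATES mapping the integer state from getTaskInfo back to a name such as 'OPEN'. A stand-alone sketch of the same check, with a stubbed state table (the numbering follows koji's task states as I understand them):

# Stand-alone sketch of the task-state check above; this stub mimics only
# the int -> name direction of koji.TASK_STATES that the plugin uses.
TASK_STATES = {0: 'FREE', 1: 'OPEN', 2: 'CLOSED', 3: 'CANCELED',
               4: 'ASSIGNED', 5: 'FAILED'}   # assumed numbering

def task_is_running(task_info):
    """True when the koji task reported by getTaskInfo is still OPEN."""
    return bool(task_info) and TASK_STATES[task_info['state']] == 'OPEN'

assert task_is_running({'state': 1})        # OPEN -> still running
assert not task_is_running({'state': 2})    # CLOSED -> finished
assert not task_is_running(None)            # task missing in koji
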
    def run(self):
        """
        run the plugin
        """
        if self.delegate_enabled:
            # will be used in koji_import
            self.workflow.triggered_after_koji_task = self.triggered_after_koji_task

        task_running = False
        koji_task_id = self.metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id:
            task_info = self.kojisession.getTaskInfo(koji_task_id,
                                                     request=True)
            if task_info:
                task_running = koji.TASK_STATES[task_info['state']] == 'OPEN'
            else:
                self.log.warning(
                    "koji-task-id label exists on build, but the task doesn't exist in koji")
        else:
            self.log.warning("koji-task-id label doesn't exist on build")

        # We don't want the plugin to continue when:
        # - delegate_task isn't enabled
        # - the build isn't an autorebuild
        # - triggered_after_koji_task was provided but the task is running;
        #   once delegating is enabled, after the first autorebuild the
        #   buildConfig already has triggered_after_koji_task in user_params,
        #   so a running koji-task-id means this is the new, already
        #   delegated task
        if not self.delegate_enabled:
            self.log.info("delegate_task not enabled, skipping plugin")
            return
        elif not is_rebuild(self.workflow):
            self.log.info("not autorebuild, skipping plugin")
            return
        elif (self.triggered_after_koji_task and task_running):
            self.log.info("koji task already delegated, skipping plugin")
            return

        self.osbs = get_openshift_session(self.workflow, NO_FALLBACK)

        self.delegate_task()

        # Remove all exit plugins; we don't want any of them running,
        # especially sendmail
        self.workflow.exit_plugins_conf = []
        # Cancel the build and raise an exception. Without cancelling,
        # the build would end up as failed; we want it marked as cancelled
        # instead, so it doesn't interfere with real failed builds
        self.cancel_build()
        self.log.info('Build was delegated, will cancel itself')
        raise BuildCanceledException("Build was canceled")
    def __init__(self, tasker, workflow, kojihub=None, url=None,
                 verify_ssl=True, use_auth=True,
                 koji_ssl_certs=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 metadata_only=False, blocksize=None,
                 target=None, poll_interval=5):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the 'docker save' image
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.koji_fallback = {
            'hub_url': kojihub,
            'auth': {
                'proxyuser': koji_proxy_user,
                'ssl_certs_dir': koji_ssl_certs,
                'krb_principal': str(koji_principal),
                'krb_keytab_path': str(koji_keytab)
            }
        }

        self.openshift_fallback = {
            'url': url,
            'insecure': not verify_ssl,
            'auth': {'enable': use_auth}
        }

        self.metadata_only = metadata_only
        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.osbs = get_openshift_session(self.workflow, self.openshift_fallback)
        self.build_id = None
        self.pullspec_image = None
Example #11
    def run(self):
        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not importing failed build")
            return

        self.osbs = get_openshift_session(self.workflow,
                                          self.openshift_fallback)
        self.get_or_create_imagestream()
        self.process_tags()
        try:
            self.osbs.import_image(self.imagestream_name,
                                   tags=self.get_trackable_tags())
        except TypeError:
            self.log.info('Falling back to calling import_image without tags')
            self.osbs.import_image(self.imagestream_name)
Example #12
    def run(self):
        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not importing failed build")
            return

        self.primary_images = get_primary_images(self.workflow)
        if not self.primary_images:
            raise RuntimeError('Could not find primary images in workflow')

        self.resolve_docker_image_repo()

        self.osbs = get_openshift_session(self.workflow,
                                          self.openshift_fallback)
        self.get_or_create_imagestream()
        self.process_tags()
        try:
            self.osbs.import_image(self.imagestream_name,
                                   tags=self.get_trackable_tags())
        except TypeError:
            self.log.info('Falling back to calling import_image without tags')
            self.osbs.import_image(self.imagestream_name)
    def run(self):
        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not importing failed build")
            return

        self.floating_images = get_floating_images(self.workflow)
        if not self.floating_images:
            raise RuntimeError('Could not find floating images in workflow')

        self.resolve_docker_image_repo()

        self.osbs = get_openshift_session(self.workflow, self.openshift_fallback)
        self.get_or_create_imagestream()

        try:
            self.osbs.import_image_tags(self.imagestream_name, self.get_trackable_tags(),
                                        self.docker_image_repo, insecure=self.insecure_registry)
        except AttributeError:
            self.log.info('Falling back to calling import_image instead of import_image_tags')
            self.process_tags()
            self.osbs.import_image(self.imagestream_name, tags=self.get_trackable_tags())
Example #14
    def run(self):
        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not importing failed build")
            return

        self.floating_images = get_floating_images(self.workflow)
        if not self.floating_images:
            self.log.info('No floating tags to import, skipping import_image')
            return

        self.resolve_docker_image_repo()

        self.osbs = get_openshift_session(self.workflow, self.openshift_fallback)
        self.get_or_create_imagestream()

        try:
            self.osbs.import_image_tags(self.imagestream_name, self.get_trackable_tags(),
                                        self.docker_image_repo, insecure=self.insecure_registry)
        except AttributeError:
            self.log.info('Falling back to calling import_image instead of import_image_tags')
            self.process_tags()
            self.osbs.import_image(self.imagestream_name, tags=self.get_trackable_tags())
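
The except AttributeError above is a compatibility shim: on an osbs-client too old to have import_image_tags, the attribute lookup itself raises, and the plugin falls back to the legacy import_image call. The same pattern in isolation, with a made-up old client:

# The compatibility pattern in isolation: prefer the newer client method,
# fall back when the installed client predates it. OldClient is a made-up
# stand-in for an osbs-client without import_image_tags.
class OldClient:
    def import_image(self, imagestream, tags=None):
        print("import_image(%r, tags=%s)" % (imagestream, tags))

client = OldClient()
try:
    client.import_image_tags("myapp", ["latest"],
                             "registry.example.com/ns/myapp", insecure=False)
except AttributeError:
    # older client: no import_image_tags, use the legacy entry point
    client.import_image("myapp", tags=["latest"])
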
    def __init__(self, tasker, workflow, koji_upload_dir, url=None,
                 build_json_dir=None, verify_ssl=True, use_auth=True,
                 blocksize=None,
                 platform='x86_64', report_multiple_digests=False):
        """
        constructor

        :param tasker: ContainerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param url: string, URL for OSv3 instance
        :param build_json_dir: str, path to directory with input json
        :param koji_upload_dir: str, path to use when uploading to hub
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param blocksize: int, blocksize to use for uploading files
        :param platform: str, platform name for this build
        :param report_multiple_digests: bool, whether to report both schema 1
            and schema 2 digests
        """
        super(KojiUploadPlugin, self).__init__(tasker, workflow)

        self.openshift_fallback = {
            'url': url,
            'insecure': not verify_ssl,
            'auth': {'enable': use_auth},
            'build_json_dir': build_json_dir
        }

        self.blocksize = blocksize
        self.koji_upload_dir = koji_upload_dir
        self.report_multiple_digests = report_multiple_digests

        self.osbs = get_openshift_session(self.workflow, self.openshift_fallback)
        self.build_id = None
        self.pullspec_image = None
        self.platform = platform
Example #16
    def run(self):
        metadata = get_build_json().get("metadata", {})

        try:
            build_id = metadata["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)
        osbs = get_openshift_session(self.workflow, self.openshift_fallback)

        if not self.source_build:
            try:
                commit_id = self.workflow.source.commit_id
            except AttributeError:
                commit_id = ""

            base_image = self.workflow.builder.dockerfile_images.original_base_image
            if (base_image is not None and
                    not self.workflow.builder.dockerfile_images.base_from_scratch):
                base_image_name = base_image
                try:
                    base_image_id = self.workflow.builder.base_image_inspect.get('Id', "")
                except KeyError:
                    base_image_id = ""
            else:
                base_image_name = ""
                base_image_id = ""

            parent_images_strings = self.workflow.builder.parent_images_to_str()
            if self.workflow.builder.dockerfile_images.base_from_scratch:
                parent_images_strings[SCRATCH_FROM] = SCRATCH_FROM

            try:
                with open(self.workflow.builder.df_path) as f:
                    dockerfile_contents = f.read()
            except AttributeError:
                dockerfile_contents = ""

        annotations = {
            'repositories': json.dumps(self.get_repositories()),
            'digests': json.dumps(self.get_pullspecs(self.get_digests())),
            'plugins-metadata': json.dumps(self.get_plugin_metadata()),
            'filesystem': json.dumps(self.get_filesystem_metadata()),
        }

        if self.source_build:
            annotations['image-id'] = ''
            if self.workflow.koji_source_manifest:
                annotations['image-id'] = self.workflow.koji_source_manifest[
                    'config']['digest']
        else:
            annotations['dockerfile'] = dockerfile_contents
            annotations['commit_id'] = commit_id
            annotations['base-image-id'] = base_image_id
            annotations['base-image-name'] = base_image_name
            annotations['image-id'] = self.workflow.builder.image_id or ''
            annotations['parent_images'] = json.dumps(parent_images_strings)

        media_types = []

        media_results = self.workflow.exit_results.get(PLUGIN_VERIFY_MEDIA_KEY)
        if isinstance(media_results, Exception):
            media_results = None

        if media_results:
            media_types += media_results

        if media_types:
            annotations['media-types'] = json.dumps(
                sorted(list(set(media_types))))

        tar_path = tar_size = tar_md5sum = tar_sha256sum = None
        if len(self.workflow.exported_image_sequence) > 0:
            tar_path = self.workflow.exported_image_sequence[-1].get("path")
            tar_size = self.workflow.exported_image_sequence[-1].get("size")
            tar_md5sum = self.workflow.exported_image_sequence[-1].get(
                "md5sum")
            tar_sha256sum = self.workflow.exported_image_sequence[-1].get(
                "sha256sum")
        # It looks like OpenShift can't handle a value being None (null in JSON)
        if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
                tar_path is not None:
            annotations["tar_metadata"] = json.dumps({
                "size":
                tar_size,
                "md5sum":
                tar_md5sum,
                "sha256sum":
                tar_sha256sum,
                "filename":
                os.path.basename(tar_path),
            })

        self.apply_remote_source_annotations(annotations)

        annotations.update(self.get_config_map())

        self.apply_plugin_annotations(annotations)
        self.apply_build_result_annotations(annotations)
        self.set_koji_task_annotations_whitelist(annotations)

        try:
            osbs.update_annotations_on_build(build_id, annotations)
        except OsbsResponseException:
            self.log.debug("annotations: %r", annotations)
            raise

        labels = self.make_labels()
        if labels:
            try:
                osbs.update_labels_on_build(build_id, labels)
            except OsbsResponseException:
                self.log.debug("labels: %r", labels)
                raise

        return {"annotations": annotations, "labels": labels}
    def _fetch_log_files(self):
        osbs = get_openshift_session(self.workflow, self.openshift_fallback)
        build_id = get_build_json()['metadata']['name'] or {}
        osbs_logs = OSBSLogs(self.log)

        return osbs_logs.get_log_files(osbs, build_id)
    def openshift_session(self):
        if not self._openshift_session:
            self._openshift_session = get_openshift_session(
                self.workflow, self.openshift_fallback)

        return self._openshift_session
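
This accessor (presumably used as a property) is plain memoization: the OpenShift session is created on first access and reused afterwards. The same pattern in isolation, with a placeholder factory standing in for get_openshift_session:

# The lazy-session pattern in isolation; make_session() is a placeholder
# for get_openshift_session(self.workflow, self.openshift_fallback).
class Plugin:
    def __init__(self, make_session):
        self._openshift_session = None
        self._make_session = make_session

    @property
    def openshift_session(self):
        if not self._openshift_session:
            self._openshift_session = self._make_session()
        return self._openshift_session

calls = []
plugin = Plugin(lambda: calls.append(1) or "session")
assert plugin.openshift_session == "session"
assert plugin.openshift_session == "session"
assert len(calls) == 1   # the factory ran only once
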
    def run(self):
        metadata = get_build_json().get("metadata", {})

        try:
            build_id = metadata["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)
        osbs = get_openshift_session(self.workflow, self.openshift_fallback)

        try:
            commit_id = self.workflow.source.commit_id
        except AttributeError:
            commit_id = ""

        if hasattr(self.workflow.builder, "original_base_image"):
            base_image = self.workflow.builder.original_base_image
        else:
            base_image = self.workflow.builder.base_image
        if base_image is not None:
            base_image_name = base_image.to_str()
            try:
                base_image_id = self.workflow.base_image_inspect['Id']
            except KeyError:
                base_image_id = ""
        else:
            base_image_name = ""
            base_image_id = ""

        try:
            with open(self.workflow.builder.df_path) as f:
                dockerfile_contents = f.read()
        except AttributeError:
            dockerfile_contents = ""

        annotations = {
            "dockerfile": dockerfile_contents,
            "repositories": json.dumps(self.get_repositories()),
            "commit_id": commit_id,
            "base-image-id": base_image_id,
            "base-image-name": base_image_name,
            "image-id": self.workflow.builder.image_id or '',
            "digests": json.dumps(self.get_pullspecs(self.get_digests())),
            "parent_images": json.dumps(self.workflow.builder.parent_images),
            "plugins-metadata": json.dumps(self.get_plugin_metadata()),
            "filesystem": json.dumps(self.get_filesystem_metadata()),
        }

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if (isinstance(help_result, dict) and
                'help_file' in help_result and 'status' in help_result):
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                annotations['help_file'] = json.dumps(None)
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                annotations['help_file'] = json.dumps(help_result['help_file'])
            else:
                self.log.error("Unknown result from add_help plugin: %s",
                               help_result)

        pulp_push_results = self.workflow.postbuild_results.get(
            PLUGIN_PULP_PUSH_KEY)
        if pulp_push_results:
            top_layer, _ = pulp_push_results
            annotations['v1-image-id'] = top_layer

        media_types = []
        if pulp_push_results:
            media_types += [MEDIA_TYPE_DOCKER_V1]

        # pulp_pull may run on worker as a postbuild plugin or on orchestrator as an exit plugin
        pulp_pull_results = (
            self.workflow.postbuild_results.get(PulpPullPlugin.key)
            or self.workflow.exit_results.get(PulpPullPlugin.key))
        if isinstance(pulp_pull_results, Exception):
            pulp_pull_results = None

        if pulp_pull_results:
            media_types += pulp_pull_results

        if media_types:
            annotations['media-types'] = json.dumps(
                sorted(list(set(media_types))))

        tar_path = tar_size = tar_md5sum = tar_sha256sum = None
        if len(self.workflow.exported_image_sequence) > 0:
            tar_path = self.workflow.exported_image_sequence[-1].get("path")
            tar_size = self.workflow.exported_image_sequence[-1].get("size")
            tar_md5sum = self.workflow.exported_image_sequence[-1].get(
                "md5sum")
            tar_sha256sum = self.workflow.exported_image_sequence[-1].get(
                "sha256sum")
        # It looks like OpenShift can't handle a value being None (null in JSON)
        if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
                tar_path is not None:
            annotations["tar_metadata"] = json.dumps({
                "size":
                tar_size,
                "md5sum":
                tar_md5sum,
                "sha256sum":
                tar_sha256sum,
                "filename":
                os.path.basename(tar_path),
            })

        annotations.update(self.get_config_map())

        self.apply_build_result_annotations(annotations)

        # For arrangement version 4 onwards (where group_manifests
        # runs in the orchestrator build), restore the repositories
        # metadata which orchestrate_build adjusted.
        if PLUGIN_GROUP_MANIFESTS_KEY in self.workflow.postbuild_results:
            annotations['repositories'] = json.dumps(self.get_repositories())
        try:
            osbs.update_annotations_on_build(build_id, annotations)
        except OsbsResponseException:
            self.log.debug("annotations: %r", annotations)
            raise

        labels = self.make_labels()
        if labels:
            try:
                osbs.update_labels_on_build(build_id, labels)
            except OsbsResponseException:
                self.log.debug("labels: %r", labels)
                raise

        return {"annotations": annotations, "labels": labels}
    def test_get_openshift_session(self, fallback, build_json_dir, config, raise_error):
        tasker, workflow = self.prepare()
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

        if build_json_dir:
            config += "      build_json_dir: " + build_json_dir

        if raise_error:
            with pytest.raises(Exception):
                read_yaml(config, 'schemas/config.json')
            return
        config_json = read_yaml(config, 'schemas/config.json')

        auth_info = {
            'openshift_url': config_json['openshift']['url'],
            'verify_ssl': not config_json['openshift'].get('insecure', False),
            'use_auth': False,
            'conf_file': None,
            'namespace': 'namespace',
            'build_json_dir': build_json_dir
        }
        if config_json['openshift'].get('auth'):
            if config_json['openshift']['auth'].get('krb_keytab_path'):
                auth_info['kerberos_keytab'] =\
                    config_json['openshift']['auth'].get('krb_keytab_path')
            if config_json['openshift']['auth'].get('krb_principal'):
                auth_info['kerberos_principal'] =\
                    config_json['openshift']['auth'].get('krb_principal')
            if config_json['openshift']['auth'].get('krb_cache_path'):
                auth_info['kerberos_ccache'] =\
                    config_json['openshift']['auth'].get('krb_cache_path')
            if config_json['openshift']['auth'].get('ssl_certs_dir'):
                auth_info['client_cert'] =\
                    os.path.join(config_json['openshift']['auth'].get('ssl_certs_dir'), 'cert')
                auth_info['client_key'] =\
                    os.path.join(config_json['openshift']['auth'].get('ssl_certs_dir'), 'key')
            auth_info['use_auth'] = config_json['openshift']['auth'].get('enable', False)

        fallback_map = {}
        if fallback:
            fallback_map = {'url': config_json['openshift']['url'],
                            'insecure': config_json['openshift'].get('insecure', False),
                            'build_json_dir': build_json_dir}
            if config_json['openshift'].get('auth'):
                fallback_map['auth'] = {}
                fallback_map['auth']['krb_keytab_path'] =\
                    config_json['openshift']['auth'].get('krb_keytab_path')
                fallback_map['auth']['krb_principal'] =\
                    config_json['openshift']['auth'].get('krb_principal')

                fallback_map['auth']['enable'] =\
                    config_json['openshift']['auth'].get('enable', False)
                fallback_map['auth']['krb_cache_path'] =\
                    config_json['openshift']['auth'].get('krb_cache_path')
                fallback_map['auth']['ssl_certs_dir'] =\
                    config_json['openshift']['auth'].get('ssl_certs_dir')
        else:
            workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
                ReactorConfig(config_json)

        (flexmock(osbs.conf.Configuration)
            .should_call('__init__')
            .with_args(**auth_info)
            .once())
        (flexmock(osbs.api.OSBS)
            .should_call('__init__')
            .once())
        flexmock(os, environ={'BUILD': '{"metadata": {"namespace": "namespace"}}'})

        get_openshift_session(workflow, fallback_map)
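
The test pins down the translation get_openshift_session performs: reactor-config (or fallback) keys such as 'insecure' and 'krb_keytab_path' become osbs.conf.Configuration keyword arguments like 'verify_ssl' and 'kerberos_keytab'. A condensed sketch of that key mapping, derived from the assertions above rather than from osbs-client itself:

# Condensed key mapping exercised by the test above; derived from its
# assertions, not from the osbs-client source.
def to_configuration_kwargs(openshift):
    auth = openshift.get('auth') or {}
    return {
        'openshift_url': openshift['url'],
        'verify_ssl': not openshift.get('insecure', False),
        'use_auth': auth.get('enable', False),
        'kerberos_keytab': auth.get('krb_keytab_path'),
        'kerberos_principal': auth.get('krb_principal'),
    }

kwargs = to_configuration_kwargs({'url': 'https://api.example:8443',
                                  'insecure': True,
                                  'auth': {'enable': True}})
assert kwargs['verify_ssl'] is False and kwargs['use_auth'] is True
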
    def run(self):
        metadata = get_build_json().get("metadata", {})

        try:
            build_id = metadata["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)
        osbs = get_openshift_session(self.workflow, self.openshift_fallback)

        try:
            commit_id = self.workflow.source.commit_id
        except AttributeError:
            commit_id = ""

        if hasattr(self.workflow.builder, "original_base_image"):
            base_image = self.workflow.builder.original_base_image
        else:
            base_image = self.workflow.builder.base_image
        if base_image is not None and not self.workflow.builder.base_from_scratch:
            base_image_name = base_image.to_str()
            try:
                base_image_id = self.workflow.builder.base_image_inspect['Id']
            except KeyError:
                base_image_id = ""
        else:
            base_image_name = ""
            base_image_id = ""

        try:
            with open(self.workflow.builder.df_path) as f:
                dockerfile_contents = f.read()
        except AttributeError:
            dockerfile_contents = ""

        parent_images_strings = self.workflow.builder.parent_images_to_str()
        if self.workflow.builder.base_from_scratch:
            parent_images_strings[SCRATCH_FROM] = SCRATCH_FROM

        annotations = {
            "dockerfile": dockerfile_contents,
            "repositories": json.dumps(self.get_repositories()),
            "commit_id": commit_id,
            "base-image-id": base_image_id,
            "base-image-name": base_image_name,
            "image-id": self.workflow.builder.image_id or '',
            "digests": json.dumps(self.get_pullspecs(self.get_digests())),
            "parent_images": json.dumps(parent_images_strings),
            "plugins-metadata": json.dumps(self.get_plugin_metadata()),
            "filesystem": json.dumps(self.get_filesystem_metadata()),
        }

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if isinstance(help_result, dict) and 'help_file' in help_result and 'status' in help_result:
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                annotations['help_file'] = json.dumps(None)
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                annotations['help_file'] = json.dumps(help_result['help_file'])
            else:
                self.log.error("Unknown result from add_help plugin: %s", help_result)

        pulp_push_results = self.workflow.postbuild_results.get(PLUGIN_PULP_PUSH_KEY)
        if pulp_push_results:
            top_layer, _ = pulp_push_results
            annotations['v1-image-id'] = top_layer

        media_types = []
        if pulp_push_results:
            media_types += [MEDIA_TYPE_DOCKER_V1]

        # pulp_pull may run on worker as a postbuild plugin or on orchestrator as an exit plugin
        # verify_media_results runs if pulp_pull does not
        media_results = (self.workflow.postbuild_results.get(PLUGIN_PULP_PULL_KEY) or
                         self.workflow.exit_results.get(PLUGIN_PULP_PULL_KEY) or
                         self.workflow.exit_results.get(PLUGIN_VERIFY_MEDIA_KEY))
        if isinstance(media_results, Exception):
            media_results = None

        if media_results:
            media_types += media_results

        if media_types:
            annotations['media-types'] = json.dumps(sorted(list(set(media_types))))

        tar_path = tar_size = tar_md5sum = tar_sha256sum = None
        if len(self.workflow.exported_image_sequence) > 0:
            tar_path = self.workflow.exported_image_sequence[-1].get("path")
            tar_size = self.workflow.exported_image_sequence[-1].get("size")
            tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
            tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
        # It looks like OpenShift can't handle a value being None (null in JSON)
        if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
                tar_path is not None:
            annotations["tar_metadata"] = json.dumps({
                "size": tar_size,
                "md5sum": tar_md5sum,
                "sha256sum": tar_sha256sum,
                "filename": os.path.basename(tar_path),
            })

        annotations.update(self.get_config_map())

        self.apply_build_result_annotations(annotations)

        # For arrangement version 4 onwards (where group_manifests
        # runs in the orchestrator build), restore the repositories
        # metadata which orchestrate_build adjusted.
        if PLUGIN_GROUP_MANIFESTS_KEY in self.workflow.postbuild_results:
            annotations['repositories'] = json.dumps(self.get_repositories())
        try:
            osbs.update_annotations_on_build(build_id, annotations)
        except OsbsResponseException:
            self.log.debug("annotations: %r", annotations)
            raise

        labels = self.make_labels()
        if labels:
            try:
                osbs.update_labels_on_build(build_id, labels)
            except OsbsResponseException:
                self.log.debug("labels: %r", labels)
                raise

        return {"annotations": annotations, "labels": labels}
Example #22
    def run(self):
        metadata = get_build_json().get("metadata", {})

        try:
            build_id = metadata["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)
        osbs = get_openshift_session(self.workflow, self.openshift_fallback)

        if not self.source_build:
            try:
                commit_id = self.workflow.source.commit_id
            except AttributeError:
                commit_id = ""

            if hasattr(self.workflow.builder, "original_base_image"):
                base_image = self.workflow.builder.original_base_image
            else:
                base_image = self.workflow.builder.base_image
            if base_image is not None and not self.workflow.builder.base_from_scratch:
                base_image_name = base_image.to_str()
                try:
                    base_image_id = self.workflow.builder.base_image_inspect['Id']
                except KeyError:
                    base_image_id = ""
            else:
                base_image_name = ""
                base_image_id = ""

            try:
                with open(self.workflow.builder.df_path) as f:
                    dockerfile_contents = f.read()
            except AttributeError:
                dockerfile_contents = ""

            parent_images_strings = self.workflow.builder.parent_images_to_str()
            if self.workflow.builder.base_from_scratch:
                parent_images_strings[SCRATCH_FROM] = SCRATCH_FROM

        annotations = {
            'repositories': json.dumps(self.get_repositories()),
            'digests': json.dumps(self.get_pullspecs(self.get_digests())),
            'plugins-metadata': json.dumps(self.get_plugin_metadata()),
            'filesystem': json.dumps(self.get_filesystem_metadata()),
        }
        extra_labels = {}

        if self.source_build:
            source_result = self.workflow.prebuild_results[
                PLUGIN_FETCH_SOURCES_KEY]
            extra_labels['sources_for_nvr'] = source_result['sources_for_nvr']
            annotations['image-id'] = ''
            if self.workflow.koji_source_manifest:
                annotations['image-id'] = self.workflow.koji_source_manifest[
                    'config']['digest']
        else:
            annotations['dockerfile'] = dockerfile_contents
            annotations['commit_id'] = commit_id
            annotations['base-image-id'] = base_image_id
            annotations['base-image-name'] = base_image_name
            annotations['image-id'] = self.workflow.builder.image_id or ''
            annotations['parent_images'] = json.dumps(parent_images_strings)

            help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
            if (isinstance(help_result, dict) and 'help_file' in help_result
                    and 'status' in help_result):
                if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                    annotations['help_file'] = json.dumps(None)
                elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                    annotations['help_file'] = json.dumps(
                        help_result['help_file'])
                else:
                    self.log.error("Unknown result from add_help plugin: %s",
                                   help_result)

        media_types = []

        media_results = self.workflow.exit_results.get(PLUGIN_VERIFY_MEDIA_KEY)
        if isinstance(media_results, Exception):
            media_results = None

        if media_results:
            media_types += media_results

        if media_types:
            annotations['media-types'] = json.dumps(
                sorted(list(set(media_types))))

        tar_path = tar_size = tar_md5sum = tar_sha256sum = None
        if len(self.workflow.exported_image_sequence) > 0:
            tar_path = self.workflow.exported_image_sequence[-1].get("path")
            tar_size = self.workflow.exported_image_sequence[-1].get("size")
            tar_md5sum = self.workflow.exported_image_sequence[-1].get(
                "md5sum")
            tar_sha256sum = self.workflow.exported_image_sequence[-1].get(
                "sha256sum")
        # It looks like OpenShift can't handle a value being None (null in JSON)
        if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
                tar_path is not None:
            annotations["tar_metadata"] = json.dumps({
                "size":
                tar_size,
                "md5sum":
                tar_md5sum,
                "sha256sum":
                tar_sha256sum,
                "filename":
                os.path.basename(tar_path),
            })

        self.apply_remote_source_annotations(annotations)

        annotations.update(self.get_config_map())

        self.apply_plugin_annotations(annotations)
        self.apply_build_result_annotations(annotations)
        self.set_koji_task_annotations_whitelist(annotations)

        # For arrangement version 4 onwards (where group_manifests
        # runs in the orchestrator build), restore the repositories
        # metadata which orchestrate_build adjusted.
        if not self.source_build:
            if PLUGIN_GROUP_MANIFESTS_KEY in self.workflow.postbuild_results:
                annotations['repositories'] = json.dumps(
                    self.get_repositories())

        try:
            osbs.update_annotations_on_build(build_id, annotations)
        except OsbsResponseException:
            self.log.debug("annotations: %r", annotations)
            raise

        labels = self.make_labels(extra_labels=extra_labels)
        if labels:
            try:
                osbs.update_labels_on_build(build_id, labels)
            except OsbsResponseException:
                self.log.debug("labels: %r", labels)
                raise

        return {"annotations": annotations, "labels": labels}
    def test_get_openshift_session(self, fallback, build_json_dir, config,
                                   raise_error):
        tasker, workflow = self.prepare()
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {}

        if build_json_dir:
            config += "      build_json_dir: " + build_json_dir

        if raise_error:
            with pytest.raises(Exception):
                read_yaml(config, 'schemas/config.json')
            return
        config_json = read_yaml(config, 'schemas/config.json')

        auth_info = {
            'openshift_url': config_json['openshift']['url'],
            'verify_ssl': not config_json['openshift'].get('insecure', False),
            'use_auth': False,
            'conf_file': None,
            'namespace': 'namespace',
            'build_json_dir': build_json_dir
        }
        if config_json['openshift'].get('auth'):
            if config_json['openshift']['auth'].get('krb_keytab_path'):
                auth_info['kerberos_keytab'] =\
                    config_json['openshift']['auth'].get('krb_keytab_path')
            if config_json['openshift']['auth'].get('krb_principal'):
                auth_info['kerberos_principal'] =\
                    config_json['openshift']['auth'].get('krb_principal')
            if config_json['openshift']['auth'].get('krb_cache_path'):
                auth_info['kerberos_ccache'] =\
                    config_json['openshift']['auth'].get('krb_cache_path')
            if config_json['openshift']['auth'].get('ssl_certs_dir'):
                auth_info['client_cert'] =\
                    os.path.join(config_json['openshift']['auth'].get('ssl_certs_dir'), 'cert')
                auth_info['client_key'] =\
                    os.path.join(config_json['openshift']['auth'].get('ssl_certs_dir'), 'key')
            auth_info['use_auth'] = config_json['openshift']['auth'].get(
                'enable', False)

        fallback_map = {}
        if fallback:
            fallback_map = {
                'url': config_json['openshift']['url'],
                'insecure': config_json['openshift'].get('insecure', False),
                'build_json_dir': build_json_dir
            }
            if config_json['openshift'].get('auth'):
                fallback_map['auth'] = {}
                fallback_map['auth']['krb_keytab_path'] =\
                    config_json['openshift']['auth'].get('krb_keytab_path')
                fallback_map['auth']['krb_principal'] =\
                    config_json['openshift']['auth'].get('krb_principal')

                fallback_map['auth']['enable'] =\
                    config_json['openshift']['auth'].get('enable', False)
                fallback_map['auth']['krb_cache_path'] =\
                    config_json['openshift']['auth'].get('krb_cache_path')
                fallback_map['auth']['ssl_certs_dir'] =\
                    config_json['openshift']['auth'].get('ssl_certs_dir')
        else:
            workflow.plugin_workspace[ReactorConfigPlugin.key][WORKSPACE_CONF_KEY] =\
                ReactorConfig(config_json)

        (flexmock(osbs.conf.Configuration).should_call('__init__').with_args(
            **auth_info).once())
        (flexmock(osbs.api.OSBS).should_call('__init__').once())
        flexmock(os,
                 environ={'BUILD': '{"metadata": {"namespace": "namespace"}}'})

        get_openshift_session(workflow, fallback_map)
    def openshift_session(self):
        if not self._openshift_session:
            self._openshift_session = get_openshift_session(self.workflow, self.openshift_fallback)

        return self._openshift_session