Example #1
def test_get_build_json(environ, expected):
    flexmock(os, environ=environ)

    if expected:
        assert get_build_json() == {'foo': 'bar'}
    else:
        with pytest.raises(KeyError):
            get_build_json()
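The helper under test is not shown in this listing. A minimal sketch consistent with the test above, assuming the platform hands the build JSON to the process through a single environment variable (the name BUILD is an assumption here, not confirmed by the snippet):

import json
import os


def get_build_json():
    # Assumed behaviour: parse the Build object injected as JSON into an
    # environment variable; a missing variable surfaces as the KeyError the
    # test expects, and {'BUILD': '{"foo": "bar"}'} yields {'foo': 'bar'}.
    return json.loads(os.environ["BUILD"])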
Example #2
def test_get_build_json(environ, expected):
    flexmock(os, environ=environ)

    if expected:
        assert get_build_json() == {'foo': 'bar'}
    else:
        with pytest.raises(KeyError):
            get_build_json()
Example #3
    def run(self):
        """
        run the plugin
        """
        if self.workflow.builder.base_from_scratch:
            self.log.info(
                "Skipping check and set rebuild: unsupported for FROM-scratch images"
            )
            return False

        metadata = get_build_json().get("metadata", {})
        self.build_labels = metadata.get("labels", {})
        buildconfig = self.build_labels["buildconfig"]
        is_rebuild = self.build_labels.get(self.label_key) == self.label_value
        self.log.info("This is a rebuild? %s", is_rebuild)

        if not is_rebuild:
            # Update the BuildConfig metadata so the next Build
            # instantiated from it is detected as being an automated
            # rebuild
            osbs = get_openshift_session(self.workflow,
                                         self.openshift_fallback)
            new_labels = {self.label_key: self.label_value}
            osbs.update_labels_on_build_config(buildconfig, new_labels)
        else:
            self.pull_latest_commit_if_configured()

        return is_rebuild
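For reference, a sketch of the build metadata this rebuild check relies on. Only the buildconfig label name is taken from the code above; the rebuild label name and all values are assumptions:

# Hypothetical fragment returned by get_build_json()["metadata"]
example_metadata = {
    "labels": {
        "buildconfig": "my-component-master",  # BuildConfig updated on first build
        "is_autorebuild": "true",              # assumed label_key/label_value pair
    }
}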
Example #4
    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        for image in self.workflow.tag_conf.primary_images:
            # dash at first/last position does not count
            if '-' in image.tag[1:-1]:
                self.nvr_image = image
                break
        else:
            raise RuntimeError('Unable to determine name:version-release')

        metadata_version = 0

        build = self.get_build(metadata)
        buildroot = self.get_buildroot(build_id=self.build_id)
        output_files = self.get_output(buildroot['id'])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': [buildroot],
            'output': [output.metadata for output in output_files],
        }

        return koji_metadata, output_files
Example #5
    def run(self):
        """
        run the plugin
        """
        if self.workflow.builder.base_from_scratch:
            self.log.info("Skipping check and set rebuild: unsupported for FROM-scratch images")
            return False
        if self.workflow.builder.custom_base_image:
            self.log.info("Skipping check and set rebuild: unsupported for custom base images")
            return False

        metadata = get_build_json().get("metadata", {})
        self.build_labels = metadata.get("labels", {})
        buildconfig = self.build_labels["buildconfig"]
        is_rebuild = self.build_labels.get(self.label_key) == self.label_value
        self.log.info("This is a rebuild? %s", is_rebuild)

        if not is_rebuild:
            # Update the BuildConfig metadata so the next Build
            # instantiated from it is detected as being an automated
            # rebuild
            osbs = get_openshift_session(self.workflow, self.openshift_fallback)
            new_labels = {self.label_key: self.label_value}
            osbs.update_labels_on_build_config(buildconfig, new_labels)
        else:
            self.pull_latest_commit_if_configured()

        return is_rebuild
Example #6
def get_openshift_session(workflow, fallback):
    config = get_openshift(workflow, fallback)
    namespace = get_build_json().get('metadata', {}).get('namespace', None)

    from osbs.api import OSBS
    from osbs.conf import Configuration

    config_kwargs = {
        'verify_ssl': not config.get('insecure', False),
        'namespace': namespace,
        'use_auth': False,
        'conf_file': None,
        'openshift_url': config['url'],
        'build_json_dir': config.get('build_json_dir')
    }

    if config.get('auth'):
        krb_keytab_path = config['auth'].get('krb_keytab_path')
        if krb_keytab_path:
            config_kwargs['kerberos_keytab'] = krb_keytab_path
        krb_principal = config['auth'].get('krb_principal')
        if krb_principal:
            config_kwargs['kerberos_principal'] = krb_principal
        krb_cache_path = config['auth'].get('krb_cache_path')
        if krb_cache_path:
            config_kwargs['kerberos_ccache'] = krb_cache_path
        ssl_certs_dir = config['auth'].get('ssl_certs_dir')
        if ssl_certs_dir:
            config_kwargs['client_cert'] = os.path.join(ssl_certs_dir, 'cert')
            config_kwargs['client_key'] = os.path.join(ssl_certs_dir, 'key')
        config_kwargs['use_auth'] = config['auth'].get('enable', False)

    osbs_conf = Configuration(**config_kwargs)
    return OSBS(osbs_conf, osbs_conf)
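A sketch of the mapping that get_openshift(...) is assumed to return, using only the keys the function above actually reads; the values are illustrative:

example_openshift_config = {
    'url': 'https://openshift.example.com:8443',
    'insecure': False,
    'build_json_dir': '/usr/share/osbs',
    'auth': {
        'enable': True,
        'krb_keytab_path': '/etc/keytab',           # optional
        'krb_principal': 'builder@EXAMPLE.COM',     # optional
        'krb_cache_path': '/tmp/krb5cc_builder',    # optional
        'ssl_certs_dir': '/var/run/secrets/osbs',   # optional; must contain 'cert' and 'key'
    },
}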
Example #7
    def run(self):
        build_json = get_build_json()
        self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)

        user_params = os.environ['USER_PARAMS']
        user_data = validate_user_data(
            user_params, 'schemas/source_containers_user_params.json')
        arrangement_version = user_data.get('arrangement_version', None)
        plugins_json_serialized = get_plugins_with_user_data(
            user_params, user_data)
        # if we get the USER_PARAMS, we'd better get the REACTOR_CONFIG too
        reactor_config_map = os.environ['REACTOR_CONFIG']
        self.reactor_env = read_yaml(reactor_config_map, 'schemas/config.json')

        if arrangement_version and arrangement_version <= 5:
            raise ValueError('arrangement_version <= 5 is no longer supported')

        # validate json before performing any changes
        read_yaml(plugins_json_serialized, 'schemas/plugins.json')
        self.plugins_json = json.loads(plugins_json_serialized)

        input_json = {
            'openshift_build_selflink':
            build_json.get('metadata', {}).get('selfLink', None)
        }
        input_json.update(self.plugins_json)

        self.log.debug("build json: %s", input_json)

        self.assert_koji_integration()

        # validate after performing changes
        read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')

        return input_json
Example #8
    def get_image_info_from_buildconfig(self):
        status = get_build_json().get("status", {})

        if 'config' not in status:
            return None, None

        config = status['config']
        if config['kind'] == 'BuildConfig':
            build_config_name = config['name']
        else:
            raise RuntimeError("Build config type isn't BuildConfig : %s" % config['kind'])

        try:
            build_config = self.openshift_session.os.get_build_config(build_config_name)
        except OsbsException:
            raise RuntimeError("Build config not found : %s" % build_config_name)

        try:
            build_from = build_config['spec']['strategy']['customStrategy']['from']
        except KeyError:
            raise RuntimeError("BuildConfig object is malformed")

        try:
            return self.process_image_from(build_from)
        except UnknownKindException:
            raise RuntimeError("BuildConfig object has unknown 'kind' %s" % build_from['kind'])
Example #9
    def get_image_info_from_buildconfig(self):
        status = get_build_json().get("status", {})

        if 'config' not in status:
            return None, None

        config = status['config']
        if config['kind'] == 'BuildConfig':
            build_config_name = config['name']
        else:
            raise RuntimeError("Build config type isn't BuildConfig : %s" %
                               config['kind'])

        try:
            build_config = self.openshift_session.os.get_build_config(
                build_config_name)
        except OsbsException:
            raise RuntimeError("Build config not found : %s" %
                               build_config_name)

        try:
            build_from = build_config['spec']['strategy']['customStrategy'][
                'from']
        except KeyError:
            raise RuntimeError("BuildConfig object is malformed")

        try:
            return self.process_image_from(build_from)
        except UnknownKindException:
            raise RuntimeError("BuildConfig object has unknown 'kind' %s" %
                               build_from['kind'])
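Both variants above only need the status.config reference from the build JSON. A minimal illustrative fragment; the BuildConfig name is made up:

example_status = {
    "config": {
        "kind": "BuildConfig",            # any other kind raises RuntimeError
        "name": "my-component-master",    # looked up via get_build_config()
    }
}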
Example #10
    def run(self):
        """
        Pull parent images and retag them uniquely for this build.
        """
        build_json = get_build_json()
        base_image_str = str(self.workflow.builder.original_base_image)
        current_platform = platform.processor() or 'x86_64'
        self.manifest_list_cache = {}
        for nonce, parent in enumerate(
                sorted(self.workflow.builder.parent_images.keys())):
            image = ImageName.parse(parent)
            if parent == base_image_str:
                image = self._resolve_base_image(build_json)
            image = self._ensure_image_registry(image)

            if self.check_platforms:
                self._validate_platforms_in_image(image)

                new_arch_image = self._get_image_for_different_arch(
                    image, current_platform)
                if new_arch_image:
                    image = new_arch_image

            new_image = self._pull_and_tag_image(image, build_json, str(nonce))
            self.workflow.builder.parent_images[parent] = str(new_image)

            if parent == base_image_str:
                self.workflow.builder.set_base_image(str(new_image))
Example #11
    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        for image in self.workflow.tag_conf.primary_images:
            # dash at first/last position does not count
            if '-' in image.tag[1:-1]:
                self.nvr_image = image
                break
        else:
            raise RuntimeError('Unable to determine name:version-release')

        metadata_version = 0

        build = self.get_build(metadata)
        buildroot = self.get_buildroot(build_id=self.build_id)
        output_files = self.get_output(buildroot['id'])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': [buildroot],
            'output': [output.metadata for output in output_files],
        }

        return koji_metadata, output_files
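The primary-image loop above keys off a dash that is not the first or last character of the tag. A quick illustration of which tags qualify; the image names are hypothetical:

# '-' in image.tag[1:-1] is True only for name:version-release style tags:
#   'repo/foo:1.0-12'  -> tag '1.0-12'  -> qualifies, becomes nvr_image
#   'repo/foo:latest'  -> tag 'latest'  -> no dash, skipped
#   'repo/foo:-edge'   -> tag '-edge'   -> dash only at the edge, skipped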
Example #12
def get_openshift_session(workflow, fallback):
    config = get_openshift(workflow, fallback)
    namespace = get_build_json().get('metadata', {}).get('namespace', None)

    from osbs.api import OSBS
    from osbs.conf import Configuration

    config_kwargs = {
        'verify_ssl': not config.get('insecure', False),
        'namespace': namespace,
        'use_auth': False,
        'conf_file': None,
        'openshift_url': config['url'],
        'build_json_dir': config.get('build_json_dir')
    }

    if config.get('auth'):
        krb_keytab_path = config['auth'].get('krb_keytab_path')
        if krb_keytab_path:
            config_kwargs['kerberos_keytab'] = krb_keytab_path
        krb_principal = config['auth'].get('krb_principal')
        if krb_principal:
            config_kwargs['kerberos_principal'] = krb_principal
        krb_cache_path = config['auth'].get('krb_cache_path')
        if krb_cache_path:
            config_kwargs['kerberos_ccache'] = krb_cache_path
        ssl_certs_dir = config['auth'].get('ssl_certs_dir')
        if ssl_certs_dir:
            config_kwargs['client_cert'] = os.path.join(ssl_certs_dir, 'cert')
            config_kwargs['client_key'] = os.path.join(ssl_certs_dir, 'key')
        config_kwargs['use_auth'] = config['auth'].get('enable', False)

    osbs_conf = Configuration(**config_kwargs)
    return OSBS(osbs_conf, osbs_conf)
Example #13
    def run(self):
        """
        each plugin has to implement this method -- it is used to run the plugin actually

        response from plugin is kept and used in json result response
        """
        user_params = None
        build_json = get_build_json()
        git_url = os.environ['SOURCE_URI']
        git_ref = os.environ.get('SOURCE_REF', None)
        image = os.environ['OUTPUT_IMAGE']
        self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)
        self.reactor_env = None

        git_commit_depth = None
        git_branch = None
        try:
            user_params = os.environ['USER_PARAMS']
            user_data = self.validate_user_data(user_params)
            git_commit_depth = user_data.get('git_commit_depth', None)
            git_branch = user_data.get('git_branch', None)
            self.plugins_json = self.get_plugins_with_user_data(user_params, user_data)
            # if we get the USER_PARAMS, we'd better get the REACTOR_CONFIG too
            reactor_config_map = os.environ['REACTOR_CONFIG']
            self.reactor_env = read_yaml(reactor_config_map, 'schemas/config.json')
        except KeyError:
            try:
                self.plugins_json = os.environ['ATOMIC_REACTOR_PLUGINS']
            except KeyError:
                raise RuntimeError("No plugin configuration found!")

        self.plugins_json = json.loads(self.plugins_json)
        # validate json before performing any changes
        read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')

        input_json = {
            'source': {
                'provider': 'git',
                'uri': git_url,
                'provider_params': {
                    'git_commit': git_ref,
                    'git_commit_depth': git_commit_depth,
                    'git_branch': git_branch,
                },
            },
            'image': image,
            'openshift_build_selflink': build_json.get('metadata', {}).get('selfLink', None)
        }
        input_json.update(self.plugins_json)

        self.log.debug("build json: %s", input_json)

        self.remove_plugins_without_parameters()
        # make sure the final json is valid
        read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')

        return input_json
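A sketch of the environment this input plugin expects, covering both branches of the try/except above. The values are placeholders and are not checked here against the referenced schemas:

import json
import os

# Primary path: USER_PARAMS plus REACTOR_CONFIG
os.environ['SOURCE_URI'] = 'git://example.com/repo.git'
os.environ['OUTPUT_IMAGE'] = 'registry.example.com/repo:latest'
os.environ['USER_PARAMS'] = json.dumps({'git_branch': 'master'})
os.environ['REACTOR_CONFIG'] = 'version: 1\n'

# Fallback path, used only when USER_PARAMS/REACTOR_CONFIG are absent:
# os.environ['ATOMIC_REACTOR_PLUGINS'] = json.dumps({'prebuild_plugins': []})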
Example #14
    def setup_osbs_api(self):
        metadata = get_build_json().get("metadata", {})
        osbs_conf = Configuration(conf_file=None,
                                  openshift_url=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=metadata.get('namespace', None))
        self.osbs = OSBS(osbs_conf, osbs_conf)
Example #15
    def __init__(self,
                 tasker,
                 workflow,
                 kojihub,
                 url,
                 verify_ssl=True,
                 use_auth=True,
                 koji_ssl_certs=None,
                 koji_proxy_user=None,
                 koji_principal=None,
                 koji_keytab=None,
                 metadata_only=False,
                 blocksize=None,
                 target=None,
                 poll_interval=5):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the 'docker save' image
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user

        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab

        self.metadata_only = metadata_only
        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.namespace = get_build_json().get('metadata',
                                              {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None,
                                  openshift_uri=url,
                                  use_auth=use_auth,
                                  verify_ssl=verify_ssl,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.pullspec_image = None
Example #16
    def setup_osbs_api(self):
        metadata = get_build_json().get("metadata", {})
        osbs_conf = Configuration(conf_file=None,
                                  openshift_url=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=metadata.get('namespace', None))
        self.osbs = OSBS(osbs_conf, osbs_conf)
Example #17
    def run(self):
        """
        each plugin has to implement this method -- it is used to run the plugin actually

        response from plugin is kept and used in json result response
        """
        build_json = get_build_json()
        git_url = os.environ['SOURCE_URI']
        git_ref = os.environ.get('SOURCE_REF', None)
        self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)

        try:
            user_params = os.environ['USER_PARAMS']
            user_data = validate_user_data(user_params,
                                           'schemas/user_params.json')
            git_commit_depth = user_data.get('git_commit_depth', None)
            git_branch = user_data.get('git_branch', None)
            arrangement_version = user_data.get('arrangement_version', None)
            self.plugins_json = get_plugins_with_user_data(
                user_params, user_data)
            # if we get the USER_PARAMS, we'd better get the REACTOR_CONFIG too
            reactor_config_map = os.environ['REACTOR_CONFIG']
            self.reactor_env = read_yaml(reactor_config_map,
                                         'schemas/config.json')
        except KeyError as exc:
            raise RuntimeError("No plugin configuration found!") from exc

        if arrangement_version and arrangement_version <= 5:
            raise ValueError('arrangement_version <= 5 is no longer supported')

        self.plugins_json = json.loads(self.plugins_json)
        # validate json before performing any changes
        read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')

        input_json = {
            'source': {
                'provider': 'git',
                'uri': git_url,
                'provider_params': {
                    'git_commit': git_ref,
                    'git_commit_depth': git_commit_depth,
                    'git_branch': git_branch,
                },
            },
            'openshift_build_selflink':
            build_json.get('metadata', {}).get('selfLink', None)
        }
        input_json.update(self.plugins_json)

        self.log.debug("build json: %s", input_json)

        self.remove_plugins_without_parameters()
        # make sure the final json is valid
        read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')

        return input_json
Example #18
    def __init__(self,
                 tasker,
                 workflow,
                 kojihub,
                 url,
                 build_json_dir,
                 koji_upload_dir,
                 verify_ssl=True,
                 use_auth=True,
                 koji_ssl_certs_dir=None,
                 koji_proxy_user=None,
                 koji_principal=None,
                 koji_keytab=None,
                 blocksize=None):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param build_json_dir: str, path to directory with input json
        :param koji_upload_dir: str, path to use when uploading to hub
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs_dir: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param blocksize: int, blocksize to use for uploading files
        """
        super(KojiUploadPlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs_dir = koji_ssl_certs_dir
        self.koji_proxy_user = koji_proxy_user

        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab

        self.blocksize = blocksize
        self.build_json_dir = build_json_dir
        self.koji_upload_dir = koji_upload_dir

        self.namespace = get_build_json().get('metadata',
                                              {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None,
                                  openshift_uri=url,
                                  use_auth=use_auth,
                                  verify_ssl=verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.pullspec_image = None
Example #19
    def run(self):
        metadata = get_build_json().get("metadata", {})
        kwargs = {}

        # FIXME: remove `openshift_uri` once osbs-client is released
        osbs_conf = Configuration(conf_file=None,
                                  openshift_uri=self.url,
                                  openshift_url=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=metadata.get('namespace', None))
        osbs = OSBS(osbs_conf, osbs_conf)
        imagestream = None
        try:
            imagestream = osbs.get_image_stream(self.imagestream)
        except OsbsResponseException:
            if self.insecure_registry is not None:
                kwargs['insecure_registry'] = self.insecure_registry

            self.log.info("Creating ImageStream %s for %s", self.imagestream,
                          self.docker_image_repo)

            imagestream = osbs.create_image_stream(self.imagestream,
                                                   self.docker_image_repo,
                                                   **kwargs)
        self.log.info("Importing new tags for %s", self.imagestream)

        primaries = None
        try:
            primaries = self.workflow.build_result.annotations['repositories'][
                'primary']
        except (TypeError, KeyError):
            self.log.exception(
                'Unable to read primary repositories annotations')

        if not primaries:
            raise RuntimeError('Could not find primary images in workflow')

        failures = False
        for s in primaries:
            tag_image_name = ImageName.parse(s)
            tag = tag_image_name.tag
            try:
                osbs.ensure_image_stream_tag(imagestream.json(), tag)
                self.log.info("Imported ImageStreamTag: (%s)", tag)
            except OsbsResponseException:
                failures = True
                self.log.info("Could not import ImageStreamTag: (%s)", tag)
        if failures:
            raise RuntimeError(
                "Failed to import ImageStreamTag(s). Check logs")

        osbs.import_image(self.imagestream)
Example #20
    def run(self):
        """
        Pull parent images and retag them uniquely for this build.
        """
        self.manifest_list_cache.clear()

        build_json = get_build_json()
        digest_fetching_exceptions = []
        for nonce, parent in enumerate(
                self.workflow.builder.dockerfile_images.keys()):
            if base_image_is_custom(parent.to_str()):
                continue

            image = parent
            use_original_tag = False
            # base_image_key is an ImageName, so compare parent as an ImageName also
            if image == self.workflow.builder.dockerfile_images.base_image_key:
                use_original_tag = True
                image = self._resolve_base_image(build_json)

            self._ensure_image_registry(image)

            if self.check_platforms:
                # run only at orchestrator
                self._validate_platforms_in_image(image)
                try:
                    self._store_manifest_digest(
                        image, use_original_tag=use_original_tag)
                except RuntimeError as exc:
                    digest_fetching_exceptions.append(exc)

            image_with_digest = self._get_image_with_digest(image)
            if image_with_digest is None:
                self.log.warning(
                    "Cannot resolve manifest digest for image '%s'", image)
            else:
                self.log.info("Replacing image '%s' with '%s'", image,
                              image_with_digest)
                image = image_with_digest

            if not self.inspect_only:
                image = self._pull_and_tag_image(image, build_json, str(nonce))
            self.workflow.builder.dockerfile_images[parent] = image

        if digest_fetching_exceptions:
            raise RuntimeError(
                'Error when extracting parent images manifest digests: {}'.
                format(digest_fetching_exceptions))
        self.workflow.builder.parents_pulled = not self.inspect_only

        # generate configuration in builder for inspecting images
        self.workflow.builder.pull_registries = \
            {reg: {'insecure': reg_cli.insecure, 'dockercfg_path': reg_cli.dockercfg_path}
             for reg, reg_cli in self.registry_clients.items()}
Example #21
    def get_current_buildimage(self):
        spec = get_build_json().get("spec")
        try:
            build_name = spec['strategy']['customStrategy']['from']['name']
            build_kind = spec['strategy']['customStrategy']['from']['kind']
        except KeyError:
            raise RuntimeError("Build object is malformed, failed to fetch buildroot image")

        if build_kind == 'DockerImage':
            return build_name
        else:
            raise RuntimeError("Build kind isn't 'DockerImage' but %s" % build_kind)
Example #22
    def get_current_buildimage(self):
        spec = get_build_json().get("spec")
        try:
            build_name = spec['strategy']['customStrategy']['from']['name']
            build_kind = spec['strategy']['customStrategy']['from']['kind']
        except KeyError:
            raise RuntimeError("Build object is malformed, failed to fetch buildroot image")

        if build_kind == 'DockerImage':
            return build_name
        else:
            raise RuntimeError("Build kind isn't 'DockerImage' but %s" % build_kind)
Example #23
    def run(self):
        """
        each plugin has to implement this method -- it is used to run the plugin actually

        response from plugin is kept and used in json result response
        """
        user_params = None
        build_json = get_build_json()
        git_url = os.environ['SOURCE_URI']
        git_ref = os.environ.get('SOURCE_REF', None)
        image = os.environ['OUTPUT_IMAGE']
        self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)
        self.reactor_env = None

        try:
            user_params = os.environ['USER_PARAMS']
            self.plugins_json = self.get_plugins_with_user_params(
                build_json, user_params)
            # if we get the USER_PARAMS, we'd better get the REACTOR_CONFIG too
            reactor_config_map = os.environ['REACTOR_CONFIG']
            self.reactor_env = read_yaml(reactor_config_map,
                                         'schemas/config.json')
        except KeyError:
            try:
                self.plugins_json = os.environ['ATOMIC_REACTOR_PLUGINS']
            except KeyError:
                raise RuntimeError("No plugin configuration found!")

        self.plugins_json = json.loads(self.plugins_json)

        input_json = {
            'source': {
                'provider': 'git',
                'uri': git_url,
                'provider_params': {
                    'git_commit': git_ref
                }
            },
            'image':
            image,
            'openshift_build_selflink':
            build_json.get('metadata', {}).get('selfLink', None)
        }
        input_json.update(self.plugins_json)

        self.log.debug("build json: %s", input_json)

        self.remove_plugins_without_parameters()
        # make sure the final json is valid
        read_yaml(json.dumps(self.plugins_json), 'schemas/plugins.json')

        return input_json
Example #24
    def get_image_info_from_annotations(self):
        annotations = get_build_json().get("metadata", {}).get('annotations', {})
        if 'from' in annotations:
            scratch_from = json.loads(annotations['from'])

            try:
                return self.process_image_from(scratch_from)
            except UnknownKindException:
                raise RuntimeError("Build annotation has unknown 'kind' %s" %
                                   scratch_from['kind'])
        else:
            raise RuntimeError("Build wasn't created from BuildConfig and neither"
                               " has 'from' annotation, which is needed for specified arch")
Example #25
    def get_image_info_from_annotations(self):
        annotations = get_build_json().get("metadata", {}).get('annotations', {})
        if 'from' in annotations:
            scratch_from = json.loads(annotations['from'])

            try:
                return self.process_image_from(scratch_from)
            except UnknownKindException:
                raise RuntimeError("Build annotation has unknown 'kind' %s" %
                                   scratch_from['kind'])
        else:
            raise RuntimeError("Build wasn't created from BuildConfig and neither"
                               " has 'from' annotation, which is needed for specified arch")
Example #26
    def combine_metadata_fragments(self):
        def add_buildroot_id(output, buildroot_id):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        metadata_version = 0

        worker_metadatas = self.workflow.postbuild_results.get(PLUGIN_FETCH_WORKER_METADATA_KEY)
        build = self.get_build(metadata, worker_metadatas)
        buildroot = self.get_buildroot(worker_metadatas)
        buildroot_id = buildroot[0]['id']
        output, output_file = self.get_output(worker_metadatas, buildroot_id)
        osbs_logs = OSBSLogs(self.log)
        output_files = [add_log_type(add_buildroot_id(md, buildroot_id))
                        for md in osbs_logs.get_log_files(self.osbs, self.build_id)]

        output.extend([of.metadata for of in output_files])
        if output_file:
            output_files.append(output_file)

        # add remote source tarball and remote-source.json files to output
        for remote_source_output in [
            get_source_tarball_output(self.workflow),
            get_remote_source_json_output(self.workflow)
        ]:
            if remote_source_output:
                add_custom_type(remote_source_output, KOJI_BTYPE_REMOTE_SOURCES)
                remote_source = add_buildroot_id(remote_source_output, buildroot_id)
                output_files.append(remote_source)
                output.append(remote_source.metadata)

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': buildroot,
            'output': output,
        }
        return koji_metadata, output_files
Example #27
    def run(self):
        """
        run the plugin
        """

        parser = df_parser(self.workflow.builder.df_path, workflow=self.workflow)
        dockerfile_labels = parser.labels
        labels = Labels(dockerfile_labels)

        component_label = labels.get_name(Labels.LABEL_TYPE_COMPONENT)

        try:
            component = dockerfile_labels[component_label]
        except KeyError:
            raise RuntimeError("missing label: {}".format(component_label))

        version_label = labels.get_name(Labels.LABEL_TYPE_VERSION)
        try:
            version = dockerfile_labels[version_label]
        except KeyError:
            raise RuntimeError('missing label: {}'.format(version_label))

        try:
            _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
        except KeyError:
            release = None

        if release:
            if not self.append:
                self.log.debug("release set explicitly so not incrementing")
                if not is_scratch_build():
                    self.check_build_existence_for_explicit_release(component, version, release)
                return

        if self.append:
            next_release = self.get_next_release_append(component, version, release)
        elif is_scratch_build():
            metadata = get_build_json().get("metadata", {})
            next_release = metadata.get("name", "1")
        else:
            next_release = self.get_next_release_standard(component, version)

        # Always set preferred release label - other will be set if old-style
        # label is present
        release_label = labels.LABEL_NAMES[Labels.LABEL_TYPE_RELEASE][0]

        # No release labels are set so set them
        self.log.info("setting %s=%s", release_label, next_release)
        # Write the label back to the file (this is a property setter)
        dockerfile_labels[release_label] = next_release
Example #28
    def set_build_image(self):
        """
        Overrides build_image for worker, to be same as in orchestrator build
        """
        spec = get_build_json().get("spec")
        try:
            build_name = spec['strategy']['customStrategy']['from']['name']
            build_kind = spec['strategy']['customStrategy']['from']['kind']
        except KeyError:
            raise RuntimeError("Build object is malformed, failed to fetch buildroot image")

        if build_kind == 'DockerImage':
            self.config_kwargs['build_image'] = build_name
        else:
            raise RuntimeError("Build kind isn't 'DockerImage' but %s" % build_kind)
Example #29
    def run(self):
        """
        each plugin has to implement this method -- it is used to run the plugin actually

        response from plugin is kept and used in json result response
        """
        build_json = get_build_json()
        git_url = os.environ['SOURCE_URI']
        git_ref = os.environ.get('SOURCE_REF', None)
        image = os.environ['OUTPUT_IMAGE']
        self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)

        try:
            self.plugins_json = os.environ['ATOMIC_REACTOR_PLUGINS']
        except KeyError:
            try:
                self.plugins_json = os.environ['DOCK_PLUGINS']
            except KeyError:
                raise RuntimeError("No plugin configuration found!")
            else:
                self.log.warning("DOCK_PLUGINS is deprecated - please update your osbs-client!")

        self.plugins_json = json.loads(self.plugins_json)

        input_json = {
            'source': {
                'provider': 'git',
                'uri': git_url,
                'provider_params': {'git_commit': git_ref}
            },
            'image': image,
            'openshift_build_selflink': build_json.get('metadata', {}).get('selfLink', None)
        }
        input_json.update(self.plugins_json)

        self.log.debug("build json: %s", input_json)

        # Compatibility code for dockerfile_content plugin
        # If this (removed) plugin is requested, ignore it.
        if 'prebuild_plugins' in self.plugins_json:
            for index, plugin in enumerate(self.plugins_json['prebuild_plugins']):
                if plugin['name'] == 'dockerfile_content':
                    self.log.info("removing dockerfile_content plugin request; "
                                  "please update your osbs-client!")
                    del self.plugins_json['prebuild_plugins'][index]
                    break

        return input_json
Example #30
    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        for image in self.workflow.tag_conf.unique_images:
            self.pullspec_image = image
            break

        for image in self.workflow.tag_conf.primary_images:
            # dash at first/last position does not count
            if '-' in image.tag[1:-1]:
                self.pullspec_image = image
                break

        if not self.pullspec_image:
            raise RuntimeError('Unable to determine pullspec_image')

        metadata_version = 0

        buildroot = get_buildroot(build_id=self.build_id,
                                  tasker=self.tasker,
                                  osbs=self.osbs,
                                  rpms=True)
        output_files, _ = get_output(workflow=self.workflow,
                                     buildroot_id=buildroot['id'],
                                     pullspec=self.pullspec_image,
                                     platform=self.platform,
                                     source_build=False,
                                     logs=self.get_logs())

        output = [output.metadata for output in output_files]
        koji_metadata = {
            'metadata_version': metadata_version,
            'buildroots': [buildroot],
            'output': output,
        }
        self.update_buildroot_koji(buildroot, output)

        return koji_metadata, output_files
Example #31
    def run(self):
        """
        run the plugin
        """

        parser = df_parser(self.workflow.builder.df_path,
                           workflow=self.workflow)
        dockerfile_labels = parser.labels
        labels = Labels(dockerfile_labels)

        try:
            _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
            if not self.append:
                self.log.debug("release set explicitly so not incrementing")
                return
        except KeyError:
            release = None

        component_label = labels.get_name(Labels.LABEL_TYPE_COMPONENT)

        try:
            component = dockerfile_labels[component_label]
        except KeyError:
            raise RuntimeError("missing label: {}".format(component_label))

        version_label = labels.get_name(Labels.LABEL_TYPE_VERSION)
        try:
            version = dockerfile_labels[version_label]
        except KeyError:
            raise RuntimeError('missing label: {}'.format(version_label))

        if self.append:
            next_release = self.get_next_release_append(
                component, version, release)
        elif is_scratch_build():
            metadata = get_build_json().get("metadata", {})
            next_release = metadata.get("name", "1")
        else:
            next_release = self.get_next_release_standard(component, version)

        # Always set preferred release label - other will be set if old-style
        # label is present
        release_label = labels.LABEL_NAMES[Labels.LABEL_TYPE_RELEASE][0]

        # No release labels are set so set them
        self.log.info("setting %s=%s", release_label, next_release)
        # Write the label back to the file (this is a property setter)
        dockerfile_labels[release_label] = next_release
Example #32
    def get_koji_user(self):
        try:
            metadata = get_build_json()['metadata']
        except KeyError:
            msg = 'Unable to get koji user: No build metadata'
            self.log.exception('Unable to get koji user: No build metadata')
            raise ValueError(msg)

        try:
            koji_task_id = int(metadata.get('labels').get('koji-task-id'))
        except (ValueError, TypeError, AttributeError):
            msg = 'Unable to get koji user: Invalid Koji task ID'
            self.log.exception(msg)
            raise ValueError(msg)

        koji_session = get_koji_session(self.workflow, NO_FALLBACK)
        return get_koji_task_owner(koji_session, koji_task_id).get('name')
Example #33
    def set_build_image(self):
        """
        Overrides build_image for worker, to be same as in orchestrator build
        """
        spec = get_build_json().get("spec")
        try:
            build_name = spec['strategy']['customStrategy']['from']['name']
            build_kind = spec['strategy']['customStrategy']['from']['kind']
        except KeyError:
            raise RuntimeError(
                "Build object is malformed, failed to fetch buildroot image")

        if build_kind == 'DockerImage':
            self.config_kwargs['build_image'] = build_name
        else:
            raise RuntimeError("Build kind isn't 'DockerImage' but %s" %
                               build_kind)
Example #34
    def __init__(self, tasker, workflow, kojihub, url,
                 verify_ssl=True, use_auth=True,
                 koji_ssl_certs=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 metadata_only=False, blocksize=None,
                 target=None, poll_interval=5):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the 'docker save' image
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user

        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab

        self.metadata_only = metadata_only
        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.namespace = get_build_json().get('metadata', {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                                  use_auth=use_auth, verify_ssl=verify_ssl,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.pullspec_image = None
Example #35
    def run(self):
        """
        Pull parent images and retag them uniquely for this build.
        """
        build_json = get_build_json()
        current_platform = platform.processor() or 'x86_64'
        self.manifest_list_cache = {}
        organization = get_registries_organization(self.workflow)

        for nonce, parent in enumerate(sorted(self.workflow.builder.parent_images.keys(),
                                              key=str)):
            image = parent
            is_base_image = False
            # original_base_image is an ImageName, so compare parent as an ImageName also
            if image == self.workflow.builder.original_base_image:
                is_base_image = True
                image = self._resolve_base_image(build_json)

            image = self._ensure_image_registry(image)

            if organization:
                image.enclose(organization)
                parent.enclose(organization)

            if self.check_platforms:
                self._validate_platforms_in_image(image)

                new_arch_image = self._get_image_for_different_arch(image, current_platform)
                if new_arch_image:
                    image = new_arch_image

            if self.inspect_only:
                new_image = image
            else:
                new_image = self._pull_and_tag_image(image, build_json, str(nonce))
            self.workflow.builder.recreate_parent_images()
            self.workflow.builder.parent_images[parent] = new_image

            if is_base_image:
                if organization:
                    # we want to be sure we have original_base_image enclosed as well
                    self.workflow.builder.original_base_image.enclose(organization)
                self.workflow.builder.set_base_image(str(new_image),
                                                     parents_pulled=not self.inspect_only,
                                                     insecure=self.parent_registry_insecure)
Example #36
    def run(self):
        """
        each plugin has to implement this method -- it is used to run the plugin actually

        response from plugin is kept and used in json result response
        """
        build_json = get_build_json()
        git_url = os.environ['SOURCE_URI']
        git_ref = os.environ.get('SOURCE_REF', None)
        image = os.environ['OUTPUT_IMAGE']
        self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)

        try:
            self.plugins_json = os.environ['ATOMIC_REACTOR_PLUGINS']
        except KeyError:
            try:
                self.plugins_json = os.environ['DOCK_PLUGINS']
            except KeyError:
                raise RuntimeError("No plugin configuration found!")
            else:
                self.log.warning(
                    "DOCK_PLUGINS is deprecated - please update your osbs-client!"
                )

        self.plugins_json = json.loads(self.plugins_json)

        input_json = {
            'source': {
                'provider': 'git',
                'uri': git_url,
                'provider_params': {
                    'git_commit': git_ref
                }
            },
            'image':
            image,
            'openshift_build_selflink':
            build_json.get('metadata', {}).get('selfLink', None)
        }
        input_json.update(self.plugins_json)

        self.log.debug("build json: %s", input_json)

        return input_json
Example #37
    def next_release_general(self, component, version, release, release_label,
                             dockerfile_labels):
        """
        get next release for build and set it in dockerfile
        """
        if is_scratch_build(self.workflow):
            # no need to append for scratch build
            metadata = get_build_json().get("metadata", {})
            next_release = metadata.get("name", "1")
        elif self.append:
            next_release = self.get_next_release_append(
                component, version, release)
        else:
            next_release = self.get_next_release_standard(component, version)

        # No release labels are set so set them
        self.log.info("setting %s=%s", release_label, next_release)
        # Write the label back to the file (this is a property setter)
        dockerfile_labels[release_label] = next_release
Example #38
    def __init__(self, tasker, workflow, triggered_after_koji_task=None):
        """
        constructor

        :param tasker: ContainerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param triggered_after_koji_task: int, original koji task for autorebuild,
            provided only when this plugin creates new koji task for autorebuild
        """
        # call parent constructor
        super(KojiDelegatePlugin, self).__init__(tasker, workflow)

        koji_setting = get_koji(self.workflow, NO_FALLBACK)
        self.delegate_enabled = koji_setting.get('delegate_task', True)
        self.task_priority = koji_setting.get('delegated_task_priority', None)
        self.triggered_after_koji_task = triggered_after_koji_task
        self.metadata = get_build_json().get("metadata", {})
        self.kojisession = get_koji_session(self.workflow, NO_FALLBACK)
        self.osbs = None
Example #39
    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab) or
                (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        koji_metadata, output_files = self.get_metadata()

        try:
            session = self.login()
            for output in output_files:
                if output.file:
                    self.upload_file(session, output, self.koji_upload_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        md_fragment = "{}-md".format(get_build_json()['metadata']['name'])
        md_fragment_key = 'metadata.json'
        cm_data = {md_fragment_key: koji_metadata}
        annotations = {
            "metadata_fragment": "configmap/" + md_fragment,
            "metadata_fragment_key": md_fragment_key
        }

        try:
            self.osbs.create_config_map(md_fragment, cm_data)
        except OsbsException:
            self.log.debug("metadata: %r", koji_metadata)
            self.log.debug("annotations: %r", annotations)
            raise

        return annotations
Example #40
    def get_koji_user(self):
        unknown_user = get_cachito(self.workflow).get('unknown_user',
                                                      'unknown_user')
        try:
            metadata = get_build_json()['metadata']
        except KeyError:
            msg = 'Unable to get koji user: No build metadata'
            self.log.warning(msg)
            return unknown_user

        try:
            koji_task_id = int(metadata.get('labels').get('koji-task-id'))
        except (ValueError, TypeError, AttributeError):
            msg = 'Unable to get koji user: Invalid Koji task ID'
            self.log.warning(msg)
            return unknown_user

        koji_session = get_koji_session(self.workflow)
        return get_koji_task_owner(koji_session,
                                   koji_task_id).get('name', unknown_user)
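Both get_koji_user variants derive the task owner from a koji-task-id label on the build. An illustrative metadata fragment; the task ID is made up:

example_metadata = {
    "labels": {
        # must parse as an int; otherwise the plugin raises ValueError
        # (Example #32) or falls back to unknown_user (Example #40)
        "koji-task-id": "123456",
    }
}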
Example #41
    def run(self):
        metadata = get_build_json().get("metadata", {})
        kwargs = {}

        # FIXME: remove `openshift_uri` once osbs-client is released
        osbs_conf = Configuration(conf_file=None,
                                  openshift_uri=self.url,
                                  openshift_url=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=metadata.get('namespace', None))
        osbs = OSBS(osbs_conf, osbs_conf)

        try:
            osbs.get_image_stream(self.imagestream)
        except OsbsResponseException:
            if self.insecure_registry is not None:
                kwargs['insecure_registry'] = self.insecure_registry

            self.log.info("Creating ImageStream %s for %s", self.imagestream,
                          self.docker_image_repo)

            # Tags are imported automatically on creation
            osbs.create_image_stream(self.imagestream, self.docker_image_repo,
                                     **kwargs)
        else:
            self.log.info("Importing new tags for %s", self.imagestream)

            attempts = 0
            while not osbs.import_image(self.imagestream):
                attempts += 1

                if attempts >= self.import_attempts:
                    msg = "Failed to import new tags for %s"
                    raise RuntimeError(msg % self.imagestream)

                self.log.info(
                    "no new tags, will retry after %d seconds (%d/%d)",
                    self.retry_delay, attempts, self.import_attempts)
                sleep(self.retry_delay)
Example #42
    def run(self):
        """
        each plugin has to implement this method -- it is used to run the plugin actually

        response from plugin is kept and used in json result response
        """
        build_json = get_build_json()
        git_url = os.environ['SOURCE_URI']
        git_ref = os.environ.get('SOURCE_REF', None)
        image = os.environ['OUTPUT_IMAGE']
        self.target_registry = os.environ.get('OUTPUT_REGISTRY', None)

        try:
            self.plugins_json = os.environ['ATOMIC_REACTOR_PLUGINS']
        except KeyError:
            try:
                self.plugins_json = os.environ['DOCK_PLUGINS']
            except KeyError:
                raise RuntimeError("No plugin configuration found!")
            else:
                self.log.warning("DOCK_PLUGINS is deprecated - please update your osbs-client!")

        self.plugins_json = json.loads(self.plugins_json)

        self.preprocess_plugins()

        input_json = {
            'source': {
                'provider': 'git',
                'uri': git_url,
                'provider_params': {'git_commit': git_ref}
            },
            'image': image,
            'openshift_build_selflink': build_json.get('metadata', {}).get('selfLink', None)
        }
        input_json.update(self.plugins_json)

        self.log.debug("build json: %s", input_json)

        return input_json
Example #43
    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        for image in self.workflow.tag_conf.unique_images:
            self.pullspec_image = image
            break

        for image in self.workflow.tag_conf.primary_images:
            # dash at first/last position does not count
            if '-' in image.tag[1:-1]:
                self.pullspec_image = image
                break

        if not self.pullspec_image:
            raise RuntimeError('Unable to determine pullspec_image')

        metadata_version = 0

        buildroot = self.get_buildroot(build_id=self.build_id)
        output_files = self.get_output(buildroot['id'])

        output = [output.metadata for output in output_files]
        koji_metadata = {
            'metadata_version': metadata_version,
            'buildroots': [buildroot],
            'output': output,
        }
        self.update_buildroot_koji(buildroot, output)

        return koji_metadata, output_files
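The primary-image loop above treats a tag as name:version-release only when it contains an interior dash; a dash at the very first or last position is ignored. A tiny illustration with made-up tag strings:

for tag in ("latest", "-rc", "rc-", "1.0-1"):
    print(tag, '-' in tag[1:-1])
# latest False, -rc False, rc- False, 1.0-1 True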
Example #44
    def combine_metadata_fragments(self):
        def add_buildroot_id(output, buildroot_id):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        metadata_version = 0

        worker_metadatas = self.workflow.postbuild_results.get(
            PLUGIN_FETCH_WORKER_METADATA_KEY)
        build = self.get_build(metadata, worker_metadatas)
        buildroot = self.get_buildroot(worker_metadatas)
        buildroot_id = buildroot[0]['id']
        output = self.get_output(worker_metadatas)
        osbs_logs = OSBSLogs(self.log)
        output_files = [
            add_log_type(add_buildroot_id(md, buildroot_id))
            for md in osbs_logs.get_log_files(self.osbs, self.build_id)
        ]
        output.extend([of.metadata for of in output_files])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': buildroot,
            'output': output,
        }
        return koji_metadata, output_files
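add_buildroot_id and add_log_type above unpack each output into a (file, metadata) pair and rebuild it as Output. A minimal sketch of the Output shape assumed here (in atomic-reactor it is a namedtuple; the definition below is illustrative):

from collections import namedtuple

# file: an open file object (or None); metadata: the koji metadata dict for it
Output = namedtuple('Output', ['file', 'metadata'])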
Example #45
    def run(self):
        metadata = get_build_json().get("metadata", {})
        kwargs = {}

        # FIXME: remove `openshift_uri` once osbs-client is released
        osbs_conf = Configuration(openshift_uri=self.url,
                                  openshift_url=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=metadata.get('namespace', None))
        osbs = OSBS(osbs_conf, osbs_conf)

        try:
            osbs.get_image_stream(self.imagestream)
        except OsbsResponseException:
            if self.insecure_registry is not None:
                kwargs['insecure_registry'] = self.insecure_registry

            self.log.info("Creating ImageStream %s for %s", self.imagestream,
                          self.docker_image_repo)

            # Tags are imported automatically on creation
            osbs.create_image_stream(self.imagestream, self.docker_image_repo,
                                     **kwargs)
        else:
            self.log.info("Importing tags for %s", self.imagestream)
            retry_attempts = 3
            while True:
                result = osbs.import_image(self.imagestream, **kwargs)
                if result:
                    break

                if retry_attempts > 0:
                    retry_attempts -= 1
                    self.log.info("no new tags, will retry after %d seconds",
                                  self.retry_delay)
                    sleep(self.retry_delay)
                else:
                    # retries exhausted and still no new tags; stop looping
                    break
    def run(self):
        """
        Run the plugin.
        """
        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        koji_metadata, output_files = self.get_metadata()

        if not is_scratch_build():
            try:
                session = get_koji_session(self.workflow, self.koji_fallback)
                for output in output_files:
                    if output.file:
                        self.upload_file(session, output, self.koji_upload_dir)
            finally:
                for output in output_files:
                    if output.file:
                        output.file.close()

        md_fragment = "{}-md".format(get_build_json()['metadata']['name'])
        md_fragment_key = 'metadata.json'
        cm_data = {md_fragment_key: koji_metadata}
        annotations = {
            "metadata_fragment": "configmap/" + md_fragment,
            "metadata_fragment_key": md_fragment_key
        }

        try:
            self.osbs.create_config_map(md_fragment, cm_data)
        except OsbsException:
            self.log.debug("metadata: %r", koji_metadata)
            self.log.debug("annotations: %r", annotations)
            raise

        return annotations
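The annotations returned above only point at the ConfigMap fragment; the metadata itself is fetched later. A hypothetical helper showing how a follow-up step could resolve that reference (it assumes the osbs-client OSBS object exposes get_config_map and that the response offers get_data_by_key):

def fetch_metadata_fragment(osbs, annotations):
    # "configmap/<build>-md" -> "<build>-md"
    cm_name = annotations["metadata_fragment"].split("/", 1)[1]
    key = annotations["metadata_fragment_key"]          # "metadata.json"
    config_map = osbs.get_config_map(cm_name)
    return config_map.get_data_by_key(key)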
    def combine_metadata_fragments(self):
        def add_buildroot_id(output, buildroot_id):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        metadata_version = 0

        worker_metadatas = self.workflow.postbuild_results.get(PLUGIN_FETCH_WORKER_METADATA_KEY)
        build = self.get_build(metadata, worker_metadatas)
        buildroot = self.get_buildroot(worker_metadatas)
        buildroot_id = buildroot[0]['id']
        output = self.get_output(worker_metadatas)
        osbs_logs = OSBSLogs(self.log)
        output_files = [add_log_type(add_buildroot_id(md, buildroot_id))
                        for md in osbs_logs.get_log_files(self.osbs, self.build_id)]
        output.extend([of.metadata for of in output_files])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': buildroot,
            'output': output,
        }
        return koji_metadata, output_files
    def run(self):
        """
        run the plugin
        """

        metadata = get_build_json().get("metadata", {})
        labels = metadata.get("labels", {})
        buildconfig = labels["buildconfig"]
        is_rebuild = labels.get(self.label_key) == self.label_value
        self.log.info("This is a rebuild? %s", is_rebuild)

        if not is_rebuild:
            # Update the BuildConfig metadata so the next Build
            # instantiated from it is detected as being an automated
            # rebuild

            # FIXME: remove `openshift_uri` once osbs-client is released
            osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                                      openshift_url=self.url,
                                      use_auth=self.use_auth,
                                      verify_ssl=self.verify_ssl,
                                      namespace=metadata.get('namespace', None))
            osbs = OSBS(osbs_conf, osbs_conf)
            labels = {self.label_key: self.label_value}
            try:
                osbs.set_labels_on_build_config(buildconfig, labels)
            except OsbsResponseException as ex:
                if ex.status_code == 409:
                    # Someone else was modifying the build
                    # configuration at the same time. Try again.
                    self.log.debug("got status %d, retrying", ex.status_code)
                    osbs.set_labels_on_build_config(buildconfig, labels)
                else:
                    raise

        return is_rebuild
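The 409 handling above retries the label update exactly once when two builds race on the same BuildConfig. A small, generic sketch of that conflict-retry pattern (the helper name and retry count are made up):

from osbs.exceptions import OsbsResponseException

def retry_on_conflict(action, retries=1):
    # Re-run `action` when the API reports a 409 conflict, up to `retries` extra times.
    for attempt in range(retries + 1):
        try:
            return action()
        except OsbsResponseException as ex:
            if ex.status_code != 409 or attempt == retries:
                raise

Usage would look like retry_on_conflict(lambda: osbs.set_labels_on_build_config(buildconfig, labels)).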
    def __init__(self, tasker, workflow, build_kwargs, platforms=None,
                 osbs_client_config=None, worker_build_image=None,
                 config_kwargs=None,
                 find_cluster_retry_delay=FIND_CLUSTER_RETRY_DELAY,
                 failure_retry_delay=FAILURE_RETRY_DELAY,
                 max_cluster_fails=MAX_CLUSTER_FAILS,
                 url=None, verify_ssl=True, use_auth=True,
                 goarch=None):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param build_kwargs: dict, keyword arguments for starting worker builds
        :param platforms: list<str>, platforms to build
                          (used via utils.get_orchestrator_platforms())
        :param osbs_client_config: str, path to directory containing osbs.conf
        :param worker_build_image: str, the builder image to use for worker builds
                                  (not used, image is inherited from the orchestrator)
        :param config_kwargs: dict, keyword arguments to override worker configuration
        :param find_cluster_retry_delay: the delay in seconds to try again reaching a cluster
        :param failure_retry_delay: the delay in seconds to try again starting a build
        :param max_cluster_fails: the maximum number of times a cluster can fail before being
                                  ignored
        :param goarch: dict, keys are platform, values are go language platform names
        """
        super(OrchestrateBuildPlugin, self).__init__(tasker, workflow)
        self.platforms = get_platforms(self.workflow)

        self.build_kwargs = build_kwargs
        self.osbs_client_config_fallback = osbs_client_config
        self.config_kwargs = config_kwargs or {}

        self.adjust_build_kwargs()
        self.validate_arrangement_version()
        self.adjust_config_kwargs()
        self.reactor_config = get_config(self.workflow)

        self.find_cluster_retry_delay = find_cluster_retry_delay
        self.failure_retry_delay = failure_retry_delay
        self.max_cluster_fails = max_cluster_fails
        self.koji_upload_dir = self.get_koji_upload_dir()
        self.fs_task_id = self.get_fs_task_id()
        self.release = self.get_release()

        self.plat_des_fallback = []
        for plat, architecture in (goarch or {}).items():
            plat_dic = {'platform': plat,
                        'architecture': architecture}
            self.plat_des_fallback.append(plat_dic)

        self.openshift_fallback = {
            'url': url,
            'insecure': not verify_ssl,
            'auth': {'enable': use_auth}
        }

        if worker_build_image:
            self.log.warning('worker_build_image is deprecated')

        self.worker_builds = []
        self.namespace = get_build_json().get('metadata', {}).get('namespace', None)
        self.build_image_digests = {}  # by platform
        self._openshift_session = None
        self.build_image_override = get_build_image_override(workflow, {})
        self.platform_descriptors = get_platform_descriptors(self.workflow, self.plat_des_fallback)
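The goarch handling above simply reshapes a platform-to-Go-architecture mapping into the platform-descriptor fallback list. A small worked example (the mapping values are illustrative):

goarch = {'x86_64': 'amd64', 'aarch64': 'arm64'}
plat_des_fallback = [{'platform': plat, 'architecture': arch}
                     for plat, arch in goarch.items()]
# -> [{'platform': 'x86_64', 'architecture': 'amd64'},
#     {'platform': 'aarch64', 'architecture': 'arm64'}]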
    def run(self):
        metadata = get_build_json().get("metadata", {})

        try:
            build_id = metadata["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)

        # initial setup will use host based auth: apache will be set to accept everything
        # from specific IP and will set specific X-Remote-User for such requests
        # FIXME: remove `openshift_uri` once osbs-client is released
        osbs_conf = Configuration(conf_file=None, openshift_uri=self.url, openshift_url=self.url,
                                  use_auth=self.use_auth, verify_ssl=self.verify_ssl,
                                  namespace=metadata.get('namespace', None))
        osbs = OSBS(osbs_conf, osbs_conf)

        try:
            commit_id = self.workflow.source.commit_id
        except AttributeError:
            commit_id = ""

        base_image = self.workflow.builder.base_image
        if base_image is not None:
            base_image_name = base_image.to_str()
            try:
                base_image_id = self.workflow.base_image_inspect['Id']
            except KeyError:
                base_image_id = ""
        else:
            base_image_name = ""
            base_image_id = ""

        try:
            with open(self.workflow.builder.df_path) as f:
                dockerfile_contents = f.read()
        except AttributeError:
            dockerfile_contents = ""

        annotations = {
            "dockerfile": dockerfile_contents,

            # We no longer store the 'docker build' logs as an annotation
            "logs": '',

            # We no longer store the rpm packages as an annotation
            "rpm-packages": '',

            "repositories": json.dumps(self.get_repositories()),
            "commit_id": commit_id,
            "base-image-id": base_image_id,
            "base-image-name": base_image_name,
            "image-id": self.workflow.builder.image_id or '',
            "digests": json.dumps(self.get_pullspecs(self.get_digests())),
            "plugins-metadata": json.dumps(self.get_plugin_metadata())
        }

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if isinstance(help_result, dict) and 'help_file' in help_result and 'status' in help_result:
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                annotations['help_file'] = json.dumps(None)
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                annotations['help_file'] = json.dumps(help_result['help_file'])
            else:
                self.log.error("Unknown result from add_help plugin: %s", help_result)

        pulp_push_results = self.workflow.postbuild_results.get(PLUGIN_PULP_PUSH_KEY)
        if pulp_push_results:
            top_layer, _ = pulp_push_results
            annotations['v1-image-id'] = top_layer

        media_types = []
        if pulp_push_results:
            media_types += [MEDIA_TYPE_DOCKER_V1]

        # pulp_pull may run on worker as a postbuild plugin or on orchestrator as an exit plugin
        pulp_pull_results = (self.workflow.postbuild_results.get(PulpPullPlugin.key) or
                             self.workflow.exit_results.get(PulpPullPlugin.key))
        if isinstance(pulp_pull_results, Exception):
            pulp_pull_results = None

        if pulp_pull_results:
            media_types += pulp_pull_results

        if media_types:
            annotations['media-types'] = json.dumps(sorted(list(set(media_types))))

        tar_path = tar_size = tar_md5sum = tar_sha256sum = None
        if len(self.workflow.exported_image_sequence) > 0:
            tar_path = self.workflow.exported_image_sequence[-1].get("path")
            tar_size = self.workflow.exported_image_sequence[-1].get("size")
            tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
            tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
        # looks like OpenShift can't handle a value being None (null in JSON)
        if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
                tar_path is not None:
            annotations["tar_metadata"] = json.dumps({
                "size": tar_size,
                "md5sum": tar_md5sum,
                "sha256sum": tar_sha256sum,
                "filename": os.path.basename(tar_path),
            })

        annotations.update(self.get_config_map())

        self.apply_build_result_annotations(annotations)

        # For arrangement version 4 onwards (where group_manifests
        # runs in the orchestrator build), restore the repositories
        # metadata which orchestrate_build adjusted.
        if PLUGIN_GROUP_MANIFESTS_KEY in self.workflow.postbuild_results:
            annotations['repositories'] = json.dumps(self.get_repositories())
        try:
            osbs.set_annotations_on_build(build_id, annotations)
        except OsbsResponseException:
            self.log.debug("annotations: %r", annotations)
            raise

        labels = self.make_labels()
        if labels:
            try:
                osbs.update_labels_on_build(build_id, labels)
            except OsbsResponseException:
                self.log.debug("labels: %r", labels)
                raise

        return {"annotations": annotations, "labels": labels}
Example #51
    def run(self):
        """
        pull base image
        """
        base_image = self.workflow.builder.base_image
        if self.parent_registry is not None:
            self.log.info("pulling base image '%s' from registry '%s'",
                          base_image, self.parent_registry)
        else:
            self.log.info("pulling base image '%s'", base_image)

        base_image_with_registry = base_image.copy()

        if self.parent_registry:
            # registry in dockerfile doesn't match provided source registry
            if base_image.registry and base_image.registry != self.parent_registry:
                self.log.error("registry in dockerfile doesn't match provided source registry, "
                               "dockerfile = '%s', provided = '%s'",
                               base_image.registry, self.parent_registry)
                raise RuntimeError(
                    "Registry specified in dockerfile doesn't match provided one. "
                    "Dockerfile: '%s', Provided: '%s'"
                    % (base_image.registry, self.parent_registry))

            base_image_with_registry.registry = self.parent_registry

        try:
            self.tasker.pull_image(base_image_with_registry,
                                   insecure=self.parent_registry_insecure)

        except RetryGeneratorException as original_exc:
            if base_image_with_registry.namespace == 'library':
                raise

            self.log.info("'%s' not found", base_image_with_registry.to_str())
            base_image_with_registry.namespace = 'library'
            self.log.info("trying '%s'", base_image_with_registry.to_str())

            try:
                self.tasker.pull_image(base_image_with_registry,
                                       insecure=self.parent_registry_insecure)

            except RetryGeneratorException:
                raise original_exc

        pulled_base = base_image_with_registry.to_str()
        self.workflow.pulled_base_images.add(pulled_base)

        # Attempt to tag it using a unique ID. We might have to retry
        # if another build with the same parent image is finishing up
        # and removing images it pulled.

        # Use the OpenShift build name as the unique ID
        unique_id = get_build_json()['metadata']['name']
        buildid_base_image = ImageName(repo=unique_id)

        for _ in range(20):
            try:
                self.log.info("tagging pulled image")
                response = self.tasker.tag_image(base_image_with_registry,
                                                 buildid_base_image)
                self.workflow.pulled_base_images.add(response)
                break
            except docker.errors.NotFound:
                # If we get here, some other build raced us to remove
                # the parent image, and that build won.
                # Retry the pull immediately.
                self.log.info("re-pulling removed image")
                self.tasker.pull_image(base_image_with_registry,
                                       insecure=self.parent_registry_insecure)
        else:
            # Failed to tag it
            self.log.error("giving up trying to pull image")
            raise RuntimeError("too many attempts to pull and tag image")

        # Make sure registry image is tagged with plain base image name
        # as other plugins would use base_image to inspect it
        response = self.tasker.tag_image(base_image_with_registry,
                                         base_image)
        self.workflow.pulled_base_images.add(response)

        self.workflow.builder.set_base_image(base_image.to_str())
        self.log.debug("image '%s' is available", pulled_base)
Example #52
    def _fetch_log_files(self):
        osbs = get_openshift_session(self.workflow, self.openshift_fallback)
        build_id = get_build_json()['metadata']['name'] or {}
        osbs_logs = OSBSLogs(self.log)

        return osbs_logs.get_log_files(osbs, build_id)
Example #53
    def __init__(self, tasker, workflow,
                 smtp_host=None, from_address=None,
                 send_on=(AUTO_CANCELED, AUTO_FAIL, MANUAL_SUCCESS, MANUAL_FAIL),
                 url=None,
                 error_addresses=(),
                 additional_addresses=(),
                 email_domain=None,
                 koji_hub=None,
                 koji_root=None,
                 koji_proxyuser=None,
                 koji_ssl_certs_dir=None,
                 koji_krb_principal=None,
                 koji_krb_keytab=None,
                 to_koji_submitter=False,
                 to_koji_pkgowner=False,
                 use_auth=None,
                 verify_ssl=None):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param send_on: list of str, list of build states when a notification should be sent
            see 'allowed_states' constant and rules in '_should_send' function
        :param url: str, URL to OSv3 instance where the build logs are stored
        :param smtp_host: str, URL of SMTP server to use to send the message (e.g. "foo.com:25")
        :param from_address: str, the "From" of the notification email
        :param error_addresses: list of str, list of email addresses where to send an email
            if an error occurred (e.g. if we can't find out who to notify about the failed build)
        :param additional_addresses: list of str, always send a message to these email addresses
        :param email_domain: str, email domain used when email addresses cannot be fetched via
            kerberos principal
        :param koji_hub: str, koji hub (xmlrpc)
        :param koji_root: str, koji root (storage)
        :param koji_proxyuser: str, proxy user
        :param koji_ssl_certs_dir: str, path to "cert", "ca", and "serverca"
        :param koji_krb_principal: str, name of Kerberos principal
        :param koji_krb_keytab: str, Kerberos keytab
        :param to_koji_submitter: bool, send a message to the koji submitter
        :param to_koji_pkgowner: bool, send messages to koji package owners
        """
        super(SendMailPlugin, self).__init__(tasker, workflow)
        self.submitter = self.DEFAULT_SUBMITTER
        self.send_on = set(send_on)

        self.smtp_fallback = {
            'host': smtp_host,
            'from_address': from_address,
            'additional_addresses': list(additional_addresses),
            'error_addresses': list(error_addresses),
            'domain': email_domain,
            'send_to_submitter': to_koji_submitter,
            'send_to_pkg_owner': to_koji_pkgowner
        }
        smtp = get_smtp(self.workflow, self.smtp_fallback)
        self.additional_addresses = smtp.get('additional_addresses', ())
        self.from_address = smtp['from_address']
        self.error_addresses = smtp.get('error_addresses', ())
        self.email_domain = smtp.get('domain')
        self.to_koji_submitter = smtp.get('send_to_submitter', False)
        self.to_koji_pkgowner = smtp.get('send_to_pkg_owner', False)

        self.koji_fallback = {
            'hub_url': koji_hub,
            'root_url': koji_root,
            'auth': {
                'proxyuser': koji_proxyuser,
                'ssl_certs_dir': koji_ssl_certs_dir,
                'krb_principal': str(koji_krb_principal),
                'krb_keytab_path': str(koji_krb_keytab)
            }
        }

        self.openshift_fallback = {
            'url': url,
            'insecure': not verify_ssl,
            'auth': {'enable': use_auth}
        }
        self.url = get_openshift(self.workflow, self.openshift_fallback)['url']

        try:
            metadata = get_build_json().get("metadata", {})
            self.koji_task_id = int(metadata['labels']['koji-task-id'])
        except Exception:
            self.log.exception("Failed to fetch koji task ID")
            self.koji_task_id = None
        else:
            self.log.info("Koji task ID: %s", self.koji_task_id)

        self.koji_build_id = self.workflow.exit_results.get(KojiImportPlugin.key)
        if not self.koji_build_id:
            self.koji_build_id = self.workflow.exit_results.get(KojiPromotePlugin.key)
            if not self.koji_build_id:
                self.log.info("Failed to fetch koji build ID")
            else:
                self.log.info("Koji build ID: %s", self.koji_build_id)
        else:
            self.log.info("Koji build ID: %s", self.koji_build_id)

        self.session = None
        if get_koji(self.workflow, self.koji_fallback)['hub_url']:
            try:
                self.session = get_koji_session(self.workflow, self.koji_fallback)
            except Exception:
                self.log.exception("Failed to connect to koji")
                self.session = None
            else:
                self.log.info("Koji connection established")
    def run(self):
        metadata = get_build_json().get("metadata", {})

        try:
            build_id = metadata["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)

        # initial setup will use host based auth: apache will be set to accept everything
        # from specific IP and will set specific X-Remote-User for such requests
        # FIXME: remove `openshift_uri` once osbs-client is released
        osbs_conf = Configuration(conf_file=None, openshift_uri=self.url, openshift_url=self.url,
                                  use_auth=self.use_auth, verify_ssl=self.verify_ssl,
                                  namespace=metadata.get('namespace', None))
        osbs = OSBS(osbs_conf, osbs_conf)

        try:
            commit_id = self.workflow.source.commit_id
        except AttributeError:
            commit_id = ""

        try:
            base_image_id = self.workflow.base_image_inspect['Id']
        except docker.errors.NotFound:
            base_image_id = ""

        annotations = {
            "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
            "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),

            # We no longer store the 'docker build' logs as an annotation
            "logs": '',

            # We no longer store the rpm packages as an annotation
            "rpm-packages": '',

            "repositories": json.dumps(self.get_repositories()),
            "commit_id": commit_id,
            "base-image-id": base_image_id,
            "base-image-name": self.workflow.builder.base_image.to_str(),
            "image-id": self.workflow.builder.image_id or '',
            "digests": json.dumps(self.get_pullspecs(self.get_digests())),
            "plugins-metadata": json.dumps(self.get_plugin_metadata())
        }

        tar_path = tar_size = tar_md5sum = tar_sha256sum = None
        if len(self.workflow.exported_image_sequence) > 0:
            tar_path = self.workflow.exported_image_sequence[-1].get("path")
            tar_size = self.workflow.exported_image_sequence[-1].get("size")
            tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
            tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
        # looks like OpenShift can't handle a value being None (null in JSON)
        if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
                tar_path is not None:
            annotations["tar_metadata"] = json.dumps({
                "size": tar_size,
                "md5sum": tar_md5sum,
                "sha256sum": tar_sha256sum,
                "filename": os.path.basename(tar_path),
            })
        try:
            osbs.set_annotations_on_build(build_id, annotations)
        except OsbsResponseException:
            self.log.debug("annotations: %r", annotations)
            raise

        labels = self.make_labels()
        if labels:
            try:
                osbs.update_labels_on_build(build_id, labels)
            except OsbsResponseException:
                self.log.debug("labels: %r", labels)
                raise

        return {"annotations": annotations, "labels": labels}
Example #55
    def __init__(self, tasker, workflow,
                 smtp_host, from_address,
                 send_on=(AUTO_CANCELED, AUTO_FAIL, MANUAL_SUCCESS, MANUAL_FAIL),
                 url=None,
                 error_addresses=(),
                 additional_addresses=(),
                 email_domain=None,
                 koji_hub=None,
                 koji_root=None,
                 koji_proxyuser=None,
                 koji_ssl_certs_dir=None,
                 koji_krb_principal=None,
                 koji_krb_keytab=None,
                 to_koji_submitter=False,
                 to_koji_pkgowner=False):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param send_on: list of str, list of build states when a notification should be sent
            see 'allowed_states' constant and rules in '_should_send' function
        :param url: str, URL to OSv3 instance where the build logs are stored
        :param smtp_host: str, URL of SMTP server to use to send the message (e.g. "foo.com:25")
        :param from_address: str, the "From" of the notification email
        :param error_addresses: list of str, list of email addresses where to send an email
            if an error occurred (e.g. if we can't find out who to notify about the failed build)
        :param additional_addresses: list of str, always send a message to these email addresses
        :param email_domain: str, email domain used when email addresses cannot be fetched via
            kerberos principal
        :param koji_hub: str, koji hub (xmlrpc)
        :param koji_root: str, koji root (storage)
        :param koji_proxyuser: str, proxy user
        :param koji_ssl_certs_dir: str, path to "cert", "ca", and "serverca"
        :param koji_krb_principal: str, name of Kerberos principal
        :param koji_krb_keytab: str, Kerberos keytab
        :param to_koji_submitter: bool, send a message to the koji submitter
        :param to_koji_pkgowner: bool, send messages to koji package owners
        """
        super(SendMailPlugin, self).__init__(tasker, workflow)
        self.send_on = set(send_on)
        self.url = url
        self.additional_addresses = list(additional_addresses)
        self.smtp_host = smtp_host
        self.from_address = from_address
        self.error_addresses = list(error_addresses)
        self.email_domain = email_domain
        self.koji_hub = koji_hub
        # Make sure koji_root doesn't end with a slash for a prettier link
        self.koji_root = koji_root[:-1] if koji_root and koji_root[-1] == '/' else koji_root
        self.koji_auth_info = {
            'proxyuser': koji_proxyuser,
            'ssl_certs_dir': koji_ssl_certs_dir,
            'krb_principal': koji_krb_principal,
            'krb_keytab': koji_krb_keytab,
        }
        self.to_koji_submitter = to_koji_submitter
        self.to_koji_pkgowner = to_koji_pkgowner
        self.submitter = self.DEFAULT_SUBMITTER

        try:
            metadata = get_build_json().get("metadata", {})
            self.koji_task_id = int(metadata['labels']['koji-task-id'])
        except Exception:
            self.log.exception("Failed to fetch koji task ID")
            self.koji_task_id = None
        else:
            self.log.info("Koji task ID: %s", self.koji_task_id)

        self.koji_build_id = self.workflow.exit_results.get(KojiImportPlugin.key)
        if not self.koji_build_id:
            self.koji_build_id = self.workflow.exit_results.get(KojiPromotePlugin.key)
            if not self.koji_build_id:
                self.log.info("Failed to fetch koji build ID")
            else:
                self.log.info("Koji build ID: %s", self.koji_build_id)
        else:
            self.log.info("Koji build ID: %s", self.koji_build_id)

        try:
            self.session = create_koji_session(self.koji_hub, self.koji_auth_info)
        except Exception:
            self.log.exception("Failed to connect to koji")
            self.session = None
        else:
            self.log.info("Koji connection established")
    def run(self):
        metadata = get_build_json().get("metadata", {})

        try:
            build_id = metadata["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)
        osbs = get_openshift_session(self.workflow, self.openshift_fallback)

        try:
            commit_id = self.workflow.source.commit_id
        except AttributeError:
            commit_id = ""

        if hasattr(self.workflow.builder, "original_base_image"):
            base_image = self.workflow.builder.original_base_image
        else:
            base_image = self.workflow.builder.base_image
        if base_image is not None and not self.workflow.builder.base_from_scratch:
            base_image_name = base_image.to_str()
            try:
                base_image_id = self.workflow.builder.base_image_inspect['Id']
            except KeyError:
                base_image_id = ""
        else:
            base_image_name = ""
            base_image_id = ""

        try:
            with open(self.workflow.builder.df_path) as f:
                dockerfile_contents = f.read()
        except AttributeError:
            dockerfile_contents = ""

        parent_images_strings = self.workflow.builder.parent_images_to_str()
        if self.workflow.builder.base_from_scratch:
            parent_images_strings[SCRATCH_FROM] = SCRATCH_FROM

        annotations = {
            "dockerfile": dockerfile_contents,
            "repositories": json.dumps(self.get_repositories()),
            "commit_id": commit_id,
            "base-image-id": base_image_id,
            "base-image-name": base_image_name,
            "image-id": self.workflow.builder.image_id or '',
            "digests": json.dumps(self.get_pullspecs(self.get_digests())),
            "parent_images": json.dumps(parent_images_strings),
            "plugins-metadata": json.dumps(self.get_plugin_metadata()),
            "filesystem": json.dumps(self.get_filesystem_metadata()),
        }

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if isinstance(help_result, dict) and 'help_file' in help_result and 'status' in help_result:
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                annotations['help_file'] = json.dumps(None)
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                annotations['help_file'] = json.dumps(help_result['help_file'])
            else:
                self.log.error("Unknown result from add_help plugin: %s", help_result)

        pulp_push_results = self.workflow.postbuild_results.get(PLUGIN_PULP_PUSH_KEY)
        if pulp_push_results:
            top_layer, _ = pulp_push_results
            annotations['v1-image-id'] = top_layer

        media_types = []
        if pulp_push_results:
            media_types += [MEDIA_TYPE_DOCKER_V1]

        # pulp_pull may run on worker as a postbuild plugin or on orchestrator as an exit plugin
        # verify_media_results runs if pulp_pull does not
        media_results = (self.workflow.postbuild_results.get(PLUGIN_PULP_PULL_KEY) or
                         self.workflow.exit_results.get(PLUGIN_PULP_PULL_KEY) or
                         self.workflow.exit_results.get(PLUGIN_VERIFY_MEDIA_KEY))
        if isinstance(media_results, Exception):
            media_results = None

        if media_results:
            media_types += media_results

        if media_types:
            annotations['media-types'] = json.dumps(sorted(list(set(media_types))))

        tar_path = tar_size = tar_md5sum = tar_sha256sum = None
        if len(self.workflow.exported_image_sequence) > 0:
            tar_path = self.workflow.exported_image_sequence[-1].get("path")
            tar_size = self.workflow.exported_image_sequence[-1].get("size")
            tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
            tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
        # looks like OpenShift can't handle a value being None (null in JSON)
        if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
                tar_path is not None:
            annotations["tar_metadata"] = json.dumps({
                "size": tar_size,
                "md5sum": tar_md5sum,
                "sha256sum": tar_sha256sum,
                "filename": os.path.basename(tar_path),
            })

        annotations.update(self.get_config_map())

        self.apply_build_result_annotations(annotations)

        # For arrangement version 4 onwards (where group_manifests
        # runs in the orchestrator build), restore the repositories
        # metadata which orchestrate_build adjusted.
        if PLUGIN_GROUP_MANIFESTS_KEY in self.workflow.postbuild_results:
            annotations['repositories'] = json.dumps(self.get_repositories())
        try:
            osbs.update_annotations_on_build(build_id, annotations)
        except OsbsResponseException:
            self.log.debug("annotations: %r", annotations)
            raise

        labels = self.make_labels()
        if labels:
            try:
                osbs.update_labels_on_build(build_id, labels)
            except OsbsResponseException:
                self.log.debug("labels: %r", labels)
                raise

        return {"annotations": annotations, "labels": labels}