class KojiPromotePlugin(ExitPlugin):
    """
    Promote this build to Koji

    Submits a successful build to Koji using the Content Generator API,
    https://fedoraproject.org/wiki/Koji/ContentGenerators

    Authentication is with Kerberos unless the koji_ssl_certs
    configuration parameter is given, in which case it should be a
    path at which 'cert', 'ca', and 'serverca' are the certificates
    for SSL authentication.

    If Kerberos is used for authentication, the default principal will
    be used (from the kernel keyring) unless both koji_keytab and
    koji_principal are specified. The koji_keytab parameter is a
    keytab name like 'type:name', and so can be used to specify a key
    in a Kubernetes secret by specifying 'FILE:/path/to/key'.

    If metadata_only is set, the 'docker save' image will not be
    uploaded, only the logs. The import will be marked as
    metadata-only.

    Runs as an exit plugin in order to capture logs from all other
    plugins.
    """

    key = PLUGIN_KOJI_PROMOTE_PLUGIN_KEY
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow, kojihub, url,
                 verify_ssl=True, use_auth=True,
                 koji_ssl_certs=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 metadata_only=False, blocksize=None,
                 target=None, poll_interval=5):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the 'docker save' image
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user

        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab

        self.metadata_only = metadata_only
        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.namespace = get_build_json().get('metadata', {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                                  use_auth=use_auth, verify_ssl=verify_ssl,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.pullspec_image = None

    @staticmethod
    def parse_rpm_output(output, tags, separator=';'):
        """
        Parse output of the rpm query.

        :param output: list, decoded output (str) from the rpm subprocess
        :param tags: list, str fields used for query output
        :return: list, dicts describing each rpm package
        """

        def field(tag):
            """
            Get a field value by name
            """
            try:
                value = fields[tags.index(tag)]
            except ValueError:
                return None

            if value == '(none)':
                return None

            return value

        components = []
        sigmarker = 'Key ID '
        for rpm in output:
            fields = rpm.rstrip('\n').split(separator)
            if len(fields) < len(tags):
                continue

            signature = field('SIGPGP:pgpsig') or field('SIGGPG:pgpsig')
            if signature:
                parts = signature.split(sigmarker, 1)
                if len(parts) > 1:
                    signature = parts[1]

            component_rpm = {
                'type': 'rpm',
                'name': field('NAME'),
                'version': field('VERSION'),
                'release': field('RELEASE'),
                'arch': field('ARCH'),
                'sigmd5': field('SIGMD5'),
                'signature': signature,
            }

            # Special handling for epoch as it must be an integer or None
            epoch = field('EPOCH')
            if epoch is not None:
                epoch = int(epoch)

            component_rpm['epoch'] = epoch

            if component_rpm['name'] != 'gpg-pubkey':
                components.append(component_rpm)

        return components
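        # Illustration with the tag order used by get_rpms() below
        # (hypothetical values): a query line such as
        #   bash;4.2.46;21.el7_3;x86_64;(none);<sigmd5>;RSA/SHA256, Thu Oct 27 2016, Key ID 199e2f91fd431d51;(none)
        # is parsed into
        #   {'type': 'rpm', 'name': 'bash', 'version': '4.2.46',
        #    'release': '21.el7_3', 'arch': 'x86_64', 'epoch': None,
        #    'sigmd5': '<sigmd5>', 'signature': '199e2f91fd431d51'}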

    def get_rpms(self):
        """
        Build a list of installed RPMs in the format required for the
        metadata.
        """

        tags = [
            'NAME',
            'VERSION',
            'RELEASE',
            'ARCH',
            'EPOCH',
            'SIGMD5',
            'SIGPGP:pgpsig',
            'SIGGPG:pgpsig',
        ]

        sep = ';'
        fmt = sep.join(["%%{%s}" % tag for tag in tags])
        cmd = "/bin/rpm -qa --qf '{0}\n'".format(fmt)
        try:
            # py3
            (status, output) = subprocess.getstatusoutput(cmd)
        except AttributeError:
            # py2
            with open('/dev/null', 'r+') as devnull:
                p = subprocess.Popen(cmd,
                                     shell=True,
                                     stdin=devnull,
                                     stdout=subprocess.PIPE,
                                     stderr=devnull)

                (stdout, stderr) = p.communicate()
                status = p.wait()
                output = stdout.decode()

        if status != 0:
            self.log.debug("%s: stderr output: %s", cmd, stderr)
            raise RuntimeError("%s: exit code %s" % (cmd, status))

        return self.parse_rpm_output(output.splitlines(), tags, separator=sep)
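        # With the tags above, the command run is (one line):
        #   /bin/rpm -qa --qf '%{NAME};%{VERSION};%{RELEASE};%{ARCH};%{EPOCH};%{SIGMD5};%{SIGPGP:pgpsig};%{SIGGPG:pgpsig}\n'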

    def get_output_metadata(self, path, filename):
        """
        Describe a file by its metadata.

        :return: dict
        """

        checksums = get_checksums(path, ['md5'])
        metadata = {'filename': filename,
                    'filesize': os.path.getsize(path),
                    'checksum': checksums['md5sum'],
                    'checksum_type': 'md5'}

        if self.metadata_only:
            metadata['metadata_only'] = True

        return metadata

    def get_builder_image_id(self):
        """
        Find out the docker ID of the buildroot image we are in.
        """

        try:
            buildroot_tag = os.environ["OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE"]
        except KeyError:
            return ''

        try:
            pod = self.osbs.get_pod_for_build(self.build_id)
            all_images = pod.get_container_image_ids()
        except OsbsException as ex:
            self.log.error("unable to find image id: %r", ex)
            return buildroot_tag

        try:
            return all_images[buildroot_tag]
        except KeyError:
            self.log.error("Unable to determine buildroot image ID for %s",
                           buildroot_tag)
            return buildroot_tag

    def get_buildroot(self, build_id):
        """
        Build the buildroot entry of the metadata.

        :return: dict, partial metadata
        """

        docker_info = self.tasker.get_info()
        host_arch, docker_version = get_docker_architecture(self.tasker)

        buildroot = {
            'id': 1,
            'host': {
                'os': docker_info['OperatingSystem'],
                'arch': host_arch,
            },
            'content_generator': {
                'name': PROG,
                'version': atomic_reactor_version,
            },
            'container': {
                'type': 'docker',
                'arch': os.uname()[4],
            },
            'tools': [
                {
                    'name': tool['name'],
                    'version': tool['version'],
                }
                for tool in get_version_of_tools()] + [
                {
                    'name': 'docker',
                    'version': docker_version,
                },
            ],
            'components': self.get_rpms(),
            'extra': {
                'osbs': {
                    'build_id': build_id,
                    'builder_image_id': self.get_builder_image_id(),
                }
            },
        }

        return buildroot

    def get_logs(self):
        """
        Build the logs entry for the metadata 'output' section

        :return: list, Output instances
        """

        output = []

        # Collect logs from server
        try:
            logs = self.osbs.get_build_logs(self.build_id)
        except OsbsException as ex:
            self.log.error("unable to get build logs: %r", ex)
        else:
            # Deleted once closed
            logfile = NamedTemporaryFile(prefix=self.build_id,
                                         suffix=".log",
                                         mode='wb')
            try:
                logfile.write(logs)
            except (TypeError, UnicodeEncodeError):
                # Older osbs-client versions returned Unicode objects
                logfile.write(logs.encode('utf-8'))
            logfile.flush()
            metadata = self.get_output_metadata(logfile.name,
                                                "openshift-final.log")
            output.append(Output(file=logfile, metadata=metadata))

        docker_logs = NamedTemporaryFile(prefix="docker-%s" % self.build_id,
                                         suffix=".log",
                                         mode='wb')
        docker_logs.write("\n".join(self.workflow.build_result.logs).encode('utf-8'))
        docker_logs.flush()
        output.append(Output(file=docker_logs,
                             metadata=self.get_output_metadata(docker_logs.name,
                                                               "build.log")))
        return output

    def get_image_components(self):
        """
        Re-package the output of the rpmqa plugin into the format required
        for the metadata.
        """

        try:
            output = self.workflow.postbuild_results[PostBuildRPMqaPlugin.key]
        except KeyError:
            self.log.error("%s plugin did not run!",
                           PostBuildRPMqaPlugin.key)
            return []

        try:
            sep = PostBuildRPMqaPlugin.sep
        except AttributeError:
            # the sep class attribute was added in Aug 2016
            sep = ','

        return self.parse_rpm_output(output, PostBuildRPMqaPlugin.rpm_tags,
                                     separator=sep)

    def get_image_output(self, arch):
        """
        Create the output for the image

        This is the Koji Content Generator metadata, along with the
        'docker save' output to upload.

        For metadata-only builds, an empty file is used instead of the
        output of 'docker save'.

        :param arch: str, architecture for this output
        :return: tuple, (metadata dict, Output instance)

        """

        image_id = self.workflow.builder.image_id
        saved_image = self.workflow.exported_image_sequence[-1].get('path')
        ext = saved_image.split('.', 1)[1]
        name_fmt = 'docker-image-{id}.{arch}.{ext}'
        image_name = name_fmt.format(id=image_id, arch=arch, ext=ext)
        if self.metadata_only:
            metadata = self.get_output_metadata(os.path.devnull, image_name)
            output = Output(file=None, metadata=metadata)
        else:
            metadata = self.get_output_metadata(saved_image, image_name)
            output = Output(file=open(saved_image, 'rb'), metadata=metadata)

        return metadata, output
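        # For example (hypothetical image ID), an x86_64 build exported as
        # image.tar.gz with image_id 'abc123' is uploaded as
        # 'docker-image-abc123.x86_64.tar.gz'.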

    def get_digests(self):
        """
        Returns a map of repositories to digests
        """

        digests = {}  # repository -> digest
        for registry in self.workflow.push_conf.docker_registries:
            for image in self.workflow.tag_conf.images:
                image_str = image.to_str()
                if image_str in registry.digests:
                    # pulp/crane supports only manifest schema v1
                    if self.workflow.push_conf.pulp_registries:
                        digest = registry.digests[image_str].v1
                    else:
                        digest = registry.digests[image_str].default
                    digests[image.to_str(registry=False)] = digest

        return digests
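        # Example result (hypothetical digest), keyed by repository
        # without the registry part:
        #   {'fedora23/python:33-1': 'sha256:e1f7...'}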

    def get_repositories(self, digests):
        """
        Build the repositories metadata

        :param digests: dict, repository -> digest
        """
        if self.workflow.push_conf.pulp_registries:
            # If pulp was used, only report pulp images
            registries = self.workflow.push_conf.pulp_registries
        else:
            # Otherwise report all the images we pushed
            registries = self.workflow.push_conf.all_registries

        output_images = []
        for registry in registries:
            image = self.pullspec_image.copy()
            image.registry = registry.uri
            pullspec = image.to_str()

            output_images.append(pullspec)

            digest = digests.get(image.to_str(registry=False))
            if digest:
                digest_pullspec = image.to_str(tag=False) + "@" + digest
                output_images.append(digest_pullspec)

        return output_images
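        # Example (hypothetical registry and digest): with pullspec_image
        # 'fedora23/python:33-1' pushed to candidate-registry.example.com,
        # this returns
        #   ['candidate-registry.example.com/fedora23/python:33-1',
        #    'candidate-registry.example.com/fedora23/python@sha256:e1f7...']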

    def get_output(self, buildroot_id):
        """
        Build the 'output' section of the metadata.

        :return: list, Output instances
        """

        def add_buildroot_id(output):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        output_files = [add_log_type(add_buildroot_id(metadata))
                        for metadata in self.get_logs()]

        # Parent of squashed built image is base image
        image_id = self.workflow.builder.image_id
        parent_id = self.workflow.base_image_inspect['Id']

        # Read config from the registry using v2 schema 2 digest
        registries = self.workflow.push_conf.docker_registries
        if registries:
            config = copy.deepcopy(registries[0].config)
        else:
            config = {}

        # We don't need container_config section
        if config and 'container_config' in config:
            del config['container_config']

        digests = self.get_digests()
        repositories = self.get_repositories(digests)
        arch = os.uname()[4]
        tags = set(image.tag for image in self.workflow.tag_conf.primary_images)
        metadata, output = self.get_image_output(arch)
        metadata.update({
            'arch': arch,
            'type': 'docker-image',
            'components': self.get_image_components(),
            'extra': {
                'image': {
                    'arch': arch,
                },
                'docker': {
                    'id': image_id,
                    'parent_id': parent_id,
                    'repositories': repositories,
                    'tags': list(tags),
                    'config': config
                },
            },
        })

        if not config:
            del metadata['extra']['docker']['config']

        # Add the 'docker save' image to the output
        image = add_buildroot_id(output)
        output_files.append(image)

        return output_files

    def get_build(self, metadata):
        start_time = int(atomic_reactor_start_time)

        labels = df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels

        component = get_preferred_label(labels, 'com.redhat.component')
        version = get_preferred_label(labels, 'version')
        release = get_preferred_label(labels, 'release')

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {'image': {'autorebuild': is_rebuild(self.workflow)}}
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = int(koji_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if isinstance(help_result, dict) and 'help_file' in help_result and 'status' in help_result:
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                extra['image']['help'] = None
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                extra['image']['help'] = help_result['help_file']
            else:
                self.log.error("Unknown result from add_help plugin: %s", help_result)

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build

    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        for image in self.workflow.tag_conf.unique_images:
            self.pullspec_image = image
            break

        for image in self.workflow.tag_conf.primary_images:
            # a dash at the first/last position does not count
            if '-' in image.tag[1:-1]:
                self.pullspec_image = image
                break

        if not self.pullspec_image:
            raise RuntimeError('Unable to determine pullspec_image')

        metadata_version = 0

        build = self.get_build(metadata)
        buildroot = self.get_buildroot(build_id=self.build_id)
        output_files = self.get_output(buildroot['id'])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': [buildroot],
            'output': [output.metadata for output in output_files],
        }

        return koji_metadata, output_files

    def upload_file(self, session, output, serverdir):
        """
        Upload a file to koji

        :return: str, pathname on server
        """
        name = output.metadata['filename']
        self.log.debug("uploading %r to %r as %r",
                       output.file.name, serverdir, name)

        kwargs = {}
        if self.blocksize is not None:
            kwargs['blocksize'] = self.blocksize
            self.log.debug("using blocksize %d", self.blocksize)

        upload_logger = KojiUploadLogger(self.log)
        session.uploadWrapper(output.file.name, serverdir, name=name,
                              callback=upload_logger.callback, **kwargs)
        path = os.path.join(serverdir, name)
        self.log.debug("uploaded %r", path)
        return path

    @staticmethod
    def get_upload_server_dir():
        """
        Create a path name for uploading files to

        :return: str, path name expected to be unique
        """
        dir_prefix = 'koji-promote'
        random_chars = ''.join([random.choice(ascii_letters)
                                for _ in range(8)])
        unique_fragment = '%r.%s' % (time.time(), random_chars)
        return os.path.join(dir_prefix, unique_fragment)
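        # Example result (the timestamp and letters vary):
        #   'koji-promote/1467979682.473441.kDwMvLjx'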

    def login(self):
        """
        Log in to koji

        :return: koji.ClientSession instance, logged in
        """

        # krbV python library throws an error if these are unicode
        auth_info = {
            "proxyuser": self.koji_proxy_user,
            "ssl_certs_dir": self.koji_ssl_certs,
            "krb_principal": str(self.koji_principal),
            "krb_keytab": str(self.koji_keytab)
        }
        return create_koji_session(str(self.kojihub), auth_info)

    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab) or
                (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        koji_metadata, output_files = self.get_metadata()

        try:
            session = self.login()
            server_dir = self.get_upload_server_dir()
            for output in output_files:
                if output.file:
                    self.upload_file(session, output, server_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        try:
            build_info = session.CGImport(koji_metadata, server_dir)
        except Exception:
            self.log.debug("metadata: %r", koji_metadata)
            raise

        # Older versions of CGImport do not return a value.
        build_id = build_info.get("id") if build_info else None

        self.log.debug("Build information: %s",
                       json.dumps(build_info, sort_keys=True, indent=4))

        # If configured, koji_tag_build plugin will perform build tagging
        tag_later = are_plugins_in_order(self.workflow.exit_plugins_conf,
                                         PLUGIN_KOJI_PROMOTE_PLUGIN_KEY,
                                         PLUGIN_KOJI_TAG_BUILD_KEY)
        if not tag_later and build_id is not None and self.target is not None:
            tag_koji_build(session, build_id, self.target,
                           poll_interval=self.poll_interval)

        return build_id
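
A minimal usage sketch, not taken from the source above: the plugin is
typically enabled through the build JSON's 'exit_plugins' list. The hub URL,
OpenShift URL, certificate path, and target name below are placeholder values.

exit_plugins_fragment = {
    "exit_plugins": [
        {
            "name": "koji_promote",
            "args": {
                "kojihub": "https://koji.example.com/kojihub",
                "url": "https://openshift.example.com/",
                "target": "example-candidate",
                "koji_ssl_certs": "/var/run/secrets/koji",
            },
        },
    ],
}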
Example #2
class KojiPromotePlugin(ExitPlugin):
    """
    Promote this build to Koji

    Submits a successful build to Koji using the Content Generator API,
    https://fedoraproject.org/wiki/Koji/ContentGenerators

    Authentication is with Kerberos unless the koji_ssl_certs
    configuration parameter is given, in which case it should be a
    path at which 'cert', 'ca', and 'serverca' are the certificates
    for SSL authentication.

    If Kerberos is used for authentication, the default principal will
    be used (from the kernel keyring) unless both koji_keytab and
    koji_principal are specified. The koji_keytab parameter is a
    keytab name like 'type:name', and so can be used to specify a key
    in a Kubernetes secret by specifying 'FILE:/path/to/key'.

    If metadata_only is set, the v1 image will not be uploaded, only
    the logs. The import will be marked as metadata-only.

    Runs as an exit plugin in order to capture logs from all other
    plugins.
    """

    key = "koji_promote"
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow, kojihub, url,
                 verify_ssl=True, use_auth=True,
                 koji_ssl_certs=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 metadata_only=False):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the v1 image
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user
        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab
        self.metadata_only = metadata_only

        osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                                  use_auth=use_auth, verify_ssl=verify_ssl)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.namespace = None

    @staticmethod
    def parse_rpm_output(output, tags, separator=';'):
        """
        Parse output of the rpm query.

        :param output: list, decoded output (str) from the rpm subprocess
        :param tags: list, str fields used for query output
        :return: list, dicts describing each rpm package
        """

        def field(tag):
            """
            Get a field value by name
            """
            try:
                value = fields[tags.index(tag)]
            except ValueError:
                return None

            if value == '(none)':
                return None

            return value

        components = []
        sigmarker = 'Key ID '
        for rpm in output:
            fields = rpm.rstrip('\n').split(separator)
            if len(fields) < len(tags):
                continue

            signature = field('SIGPGP:pgpsig') or field('SIGGPG:pgpsig')
            if signature:
                parts = signature.split(sigmarker, 1)
                if len(parts) > 1:
                    signature = parts[1]

            component_rpm = {
                'type': 'rpm',
                'name': field('NAME'),
                'version': field('VERSION'),
                'release': field('RELEASE'),
                'arch': field('ARCH'),
                'sigmd5': field('SIGMD5'),
                'signature': signature,
            }

            # Special handling for epoch as it must be an integer or None
            epoch = field('EPOCH')
            if epoch is not None:
                epoch = int(epoch)

            component_rpm['epoch'] = epoch

            if component_rpm['name'] != 'gpg-pubkey':
                components.append(component_rpm)

        return components

    def get_rpms(self):
        """
        Build a list of installed RPMs in the format required for the
        metadata.
        """

        tags = [
            'NAME',
            'VERSION',
            'RELEASE',
            'ARCH',
            'EPOCH',
            'SIGMD5',
            'SIGPGP:pgpsig',
            'SIGGPG:pgpsig',
        ]

        sep = ';'
        fmt = sep.join(["%%{%s}" % tag for tag in tags])
        cmd = "/bin/rpm -qa --qf '{0}\n'".format(fmt)
        try:
            # py3
            (status, output) = subprocess.getstatusoutput(cmd)
        except AttributeError:
            # py2
            with open('/dev/null', 'r+') as devnull:
                p = subprocess.Popen(cmd,
                                     shell=True,
                                     stdin=devnull,
                                     stdout=subprocess.PIPE,
                                     stderr=devnull)

                (stdout, stderr) = p.communicate()
                status = p.wait()
                output = stdout.decode()

        if status != 0:
            self.log.debug("%s: stderr output: %s", cmd, stderr)
            raise RuntimeError("%s: exit code %s" % (cmd, status))

        return self.parse_rpm_output(output.splitlines(), tags, separator=sep)

    def get_output_metadata(self, path, filename):
        """
        Describe a file by its metadata.

        :return: dict
        """

        checksums = get_checksums(path, ['md5'])
        metadata = {'filename': filename,
                    'filesize': os.path.getsize(path),
                    'checksum': checksums['md5sum'],
                    'checksum_type': 'md5'}

        if self.metadata_only:
            metadata['metadata_only'] = True

        return metadata

    def get_builder_image_id(self):
        """
        Find out the docker ID of the buildroot image we are in.
        """

        buildroot_tag = os.environ["OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE"]
        kwargs = {}
        if self.namespace is not None:
            kwargs['namespace'] = self.namespace
        pod = self.osbs.get_pod_for_build(self.build_id, **kwargs)
        all_images = pod.get_container_image_ids()

        try:
            return all_images[buildroot_tag]
        except KeyError:
            self.log.error("Unable to determine buildroot image ID for %s",
                           buildroot_tag)
            return buildroot_tag

    def get_buildroot(self, build_id):
        """
        Build the buildroot entry of the metadata.

        :return: dict, partial metadata
        """

        docker_version = self.tasker.get_version()
        docker_info = self.tasker.get_info()
        host_arch = docker_version['Arch']
        if host_arch == 'amd64':
            host_arch = 'x86_64'

        buildroot = {
            'id': 1,
            'host': {
                'os': docker_info['OperatingSystem'],
                'arch': host_arch,
            },
            'content_generator': {
                'name': PROG,
                'version': atomic_reactor_version,
            },
            'container': {
                'type': 'docker',
                'arch': os.uname()[4],
            },
            'tools': [
                {
                    'name': tool['name'],
                    'version': tool['version'],
                }
                for tool in get_version_of_tools()] + [
                {
                    'name': 'docker',
                    'version': docker_version['Version'],
                },
            ],
            'components': self.get_rpms(),
            'extra': {
                'osbs': {
                    'build_id': build_id,
                    'builder_image_id': self.get_builder_image_id(),
                }
            },
        }

        return buildroot

    def get_logs(self):
        """
        Build the logs entry for the metadata 'output' section

        :return: list, Output instances
        """

        # Collect logs from server
        kwargs = {}
        if self.namespace is not None:
            kwargs['namespace'] = self.namespace
        logs = self.osbs.get_build_logs(self.build_id, **kwargs)

        # Deleted once closed
        logfile = NamedTemporaryFile(prefix=self.build_id,
                                     suffix=".log",
                                     mode='w')
        logfile.write(logs)
        logfile.flush()

        docker_logs = NamedTemporaryFile(prefix="docker-%s" % self.build_id,
                                         suffix=".log",
                                         mode='w')
        docker_logs.write("\n".join(self.workflow.build_logs))
        docker_logs.flush()

        return [Output(file=docker_logs,
                       metadata=self.get_output_metadata(docker_logs.name,
                                                         "build.log")),
                Output(file=logfile,
                       metadata=self.get_output_metadata(logfile.name,
                                                         "openshift-final.log"))]

    def get_image_components(self):
        """
        Re-package the output of the rpmqa plugin into the format required
        for the metadata.
        """

        try:
            output = self.workflow.postbuild_results[PostBuildRPMqaPlugin.key]
        except KeyError:
            self.log.error("%s plugin did not run!",
                           PostBuildRPMqaPlugin.key)
            return []

        return self.parse_rpm_output(output, PostBuildRPMqaPlugin.rpm_tags,
                                     separator=',')

    def get_image_output(self):
        """
        Create the output for the image

        For v1, this is the v1 image. For v2, this is the v2 metadata
        with the checksum of an empty file, and no actual upload.

        :return: tuple, (metadata dict, Output instance)
        """

        image_id = self.workflow.builder.image_id
        v1_image = self.workflow.exported_image_sequence[-1].get('path')
        ext = v1_image.split('.', 1)[1]
        if self.metadata_only:
            v2_image_name = 'docker-v2-image-{0}.{1}'.format(image_id, ext)
            metadata = self.get_output_metadata(os.path.devnull, v2_image_name)
            output = Output(file=None, metadata=metadata)
        else:
            v1_image_name = 'docker-v1-image-{0}.{1}'.format(image_id, ext)
            metadata = self.get_output_metadata(v1_image, v1_image_name)
            output = Output(file=open(v1_image, 'rb'), metadata=metadata)

        return metadata, output

    def get_output_images(self):
        if self.workflow.push_conf.pulp_registries:
            # If pulp was used, only report pulp images
            registries = self.workflow.push_conf.pulp_registries
        else:
            # Otherwise report all the images we pushed
            registries = self.workflow.push_conf.all_registries

        output_images = []
        for registry in registries:
            for image in (self.workflow.tag_conf.primary_images +
                          self.workflow.tag_conf.unique_images):
                registry_image = image.copy()
                registry_image.registry = registry.uri
                if registry_image not in output_images:
                    output_images.append(registry_image)

        return output_images

    def get_output(self, buildroot_id):
        """
        Build the 'output' section of the metadata.

        :return: list, Output instances
        """

        def add_buildroot_id(output):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        output_files = [add_log_type(add_buildroot_id(metadata))
                        for metadata in self.get_logs()]

        # Parent of squashed built image is base image
        image_id = self.workflow.builder.image_id
        parent_id = self.workflow.base_image_inspect['Id']
        output_images = self.get_output_images()
        repositories = [image.to_str() for image in output_images
                        if image.tag != 'latest']
        arch = os.uname()[4]
        metadata, output = self.get_image_output()
        metadata.update({
            'arch': arch,
            'type': 'docker-image',
            'components': self.get_image_components(),
            'extra': {
                'image': {
                    'arch': arch,
                },
                'docker': {
                    'id': image_id,
                    'parent_id': parent_id,
                    'repositories': repositories,
                },
            },
        })

        # Add the v1 image (or v2 metadata) to the output
        image = add_buildroot_id(output)
        output_files.append(image)

        return output_files

    def get_build(self, metadata):
        build_start_time = metadata["creationTimestamp"]
        try:
            # Decode UTC RFC3339 date with no fractional seconds
            # (the format we expect)
            start_time_struct = time.strptime(build_start_time,
                                              '%Y-%m-%dT%H:%M:%SZ')
            start_time = int(time.mktime(start_time_struct))
        except ValueError:
            self.log.error("Invalid time format (%s)", build_start_time)
            raise

        name = None
        version = None
        release = None
        for image_name in self.workflow.tag_conf.primary_images:
            if '-' in image_name.tag:
                name = image_name.repo
                version, release = image_name.tag.split('-', 1)

        if name is None or version is None or release is None:
            raise RuntimeError('Unable to determine name-version-release')
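        # For example, a primary image 'fedora23/python:33-7' (hypothetical)
        # gives name 'python', version '33', release '7'; tags without a
        # dash, such as 'latest', are skipped and cannot satisfy the check.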

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        build = {
            'name': name,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': {
                'image': {},
            },
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build

    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. "
                           "Probably not running in build container.")
            raise

        try:
            metadata = build_json["metadata"]
            self.build_id = metadata["name"]
            self.namespace = metadata.get("namespace")
        except KeyError:
            self.log.error("No build metadata")
            raise

        metadata_version = 0

        build = self.get_build(metadata)
        buildroot = self.get_buildroot(build_id=self.build_id)
        output_files = self.get_output(buildroot['id'])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': [buildroot],
            'output': [output.metadata for output in output_files],
        }

        return koji_metadata, output_files

    def upload_file(self, session, output, serverdir):
        """
        Upload a file to koji

        :return: str, pathname on server
        """
        name = output.metadata['filename']
        self.log.debug("uploading %r to %r as %r",
                       output.file.name, serverdir, name)
        session.uploadWrapper(output.file.name, serverdir, name=name)
        path = os.path.join(serverdir, name)
        self.log.debug("uploaded %r", path)
        return path

    @staticmethod
    def get_upload_server_dir():
        """
        Create a path name for uploading files to

        :return: str, path name expected to be unique
        """
        dir_prefix = 'koji-promote'
        random_chars = ''.join([random.choice(ascii_letters)
                                for _ in range(8)])
        unique_fragment = '%r.%s' % (time.time(), random_chars)
        return os.path.join(dir_prefix, unique_fragment)

    def login(self):
        """
        Log in to koji

        :return: koji.ClientSession instance, logged in
        """
        session = koji.ClientSession(self.kojihub)
        kwargs = {}
        if self.koji_proxy_user:
            kwargs['proxyuser'] = self.koji_proxy_user

        if self.koji_ssl_certs:
            # Use certificates
            self.log.info("Using SSL certificates for Koji authentication")
            session.ssl_login(os.path.join(self.koji_ssl_certs, 'cert'),
                              os.path.join(self.koji_ssl_certs, 'ca'),
                              os.path.join(self.koji_ssl_certs, 'serverca'),
                              **kwargs)
        else:
            # Use Kerberos
            self.log.info("Using Kerberos for Koji authentication")
            if self.koji_principal and self.koji_keytab:
                kwargs['principal'] = self.koji_principal
                kwargs['keytab'] = self.koji_keytab

            session.krb_login(**kwargs)

        return session

    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab) or
                (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        if not is_rebuild(self.workflow):
            self.log.info("Not promoting to koji: not a rebuild")
            return

        koji_metadata, output_files = self.get_metadata()

        try:
            session = self.login()
            server_dir = self.get_upload_server_dir()
            for output in output_files:
                if output.file:
                    self.upload_file(session, output, server_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        session.CGImport(koji_metadata, server_dir)

        self.log.debug("Submitted with metadata: %s",
                       json.dumps(koji_metadata, sort_keys=True, indent=4))
Example #3
class KojiUploadPlugin(PostBuildPlugin):
    """
    Upload this build to Koji

    Note: only the image archive is uploaded to Koji at this stage.
    Metadata about this image is created and stored in a ConfigMap in
    OpenShift, ready for the orchestrator build to collect and use to
    actually create the Koji Build together with the uploaded image
    archive(s).

    Authentication is with Kerberos unless the koji_ssl_certs_dir
    configuration parameter is given, in which case it should be a
    path at which 'cert', 'ca', and 'serverca' are the certificates
    for SSL authentication.

    If Kerberos is used for authentication, the default principal will
    be used (from the kernel keyring) unless both koji_keytab and
    koji_principal are specified. The koji_keytab parameter is a
    keytab name like 'type:name', and so can be used to specify a key
    in a Kubernetes secret by specifying 'FILE:/path/to/key'.
    """

    key = PLUGIN_KOJI_UPLOAD_PLUGIN_KEY
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow, kojihub, url, build_json_dir,
                 koji_upload_dir, verify_ssl=True, use_auth=True,
                 koji_ssl_certs_dir=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 blocksize=None, prefer_schema1_digest=True):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param build_json_dir: str, path to directory with input json
        :param koji_upload_dir: str, path to use when uploading to hub
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs_dir: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param blocksize: int, blocksize to use for uploading files
        :param prefer_schema1_digest: bool, when True, v2 schema 1 digest will
            be preferred as the built image digest
        """
        super(KojiUploadPlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs_dir = koji_ssl_certs_dir
        self.koji_proxy_user = koji_proxy_user

        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab

        self.blocksize = blocksize
        self.build_json_dir = build_json_dir
        self.koji_upload_dir = koji_upload_dir
        self.prefer_schema1_digest = prefer_schema1_digest

        self.namespace = get_build_json().get('metadata', {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                                  use_auth=use_auth, verify_ssl=verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.pullspec_image = None

    @staticmethod
    def parse_rpm_output(output, tags, separator=';'):
        """
        Parse output of the rpm query.

        :param output: list, decoded output (str) from the rpm subprocess
        :param tags: list, str fields used for query output
        :return: list, dicts describing each rpm package
        """

        def field(tag):
            """
            Get a field value by name
            """
            try:
                value = fields[tags.index(tag)]
            except ValueError:
                return None

            if value == '(none)':
                return None

            return value

        components = []
        sigmarker = 'Key ID '
        for rpm in output:
            fields = rpm.rstrip('\n').split(separator)
            if len(fields) < len(tags):
                continue

            signature = field('SIGPGP:pgpsig') or field('SIGGPG:pgpsig')
            if signature:
                parts = signature.split(sigmarker, 1)
                if len(parts) > 1:
                    signature = parts[1]

            component_rpm = {
                'type': 'rpm',
                'name': field('NAME'),
                'version': field('VERSION'),
                'release': field('RELEASE'),
                'arch': field('ARCH'),
                'sigmd5': field('SIGMD5'),
                'signature': signature,
            }

            # Special handling for epoch as it must be an integer or None
            epoch = field('EPOCH')
            if epoch is not None:
                epoch = int(epoch)

            component_rpm['epoch'] = epoch

            if component_rpm['name'] != 'gpg-pubkey':
                components.append(component_rpm)

        return components

    def get_rpms(self):
        """
        Build a list of installed RPMs in the format required for the
        metadata.
        """

        tags = [
            'NAME',
            'VERSION',
            'RELEASE',
            'ARCH',
            'EPOCH',
            'SIGMD5',
            'SIGPGP:pgpsig',
            'SIGGPG:pgpsig',
        ]

        sep = ';'
        fmt = sep.join(["%%{%s}" % tag for tag in tags])
        cmd = "/bin/rpm -qa --qf '{0}\n'".format(fmt)
        try:
            # py3
            (status, output) = subprocess.getstatusoutput(cmd)
        except AttributeError:
            # py2
            with open('/dev/null', 'r+') as devnull:
                p = subprocess.Popen(cmd,
                                     shell=True,
                                     stdin=devnull,
                                     stdout=subprocess.PIPE,
                                     stderr=devnull)

                (stdout, stderr) = p.communicate()
                status = p.wait()
                output = stdout.decode()

        if status != 0:
            self.log.debug("%s: stderr output: %s", cmd, stderr)
            raise RuntimeError("%s: exit code %s" % (cmd, status))

        return self.parse_rpm_output(output.splitlines(), tags, separator=sep)

    def get_output_metadata(self, path, filename):
        """
        Describe a file by its metadata.

        :return: dict
        """

        checksums = get_checksums(path, ['md5'])
        metadata = {'filename': filename,
                    'filesize': os.path.getsize(path),
                    'checksum': checksums['md5sum'],
                    'checksum_type': 'md5'}

        return metadata

    def get_builder_image_id(self):
        """
        Find out the docker ID of the buildroot image we are in.
        """

        try:
            buildroot_tag = os.environ["OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE"]
        except KeyError:
            return ''

        try:
            pod = self.osbs.get_pod_for_build(self.build_id)
            all_images = pod.get_container_image_ids()
        except OsbsException as ex:
            self.log.error("unable to find image id: %r", ex)
            return buildroot_tag

        try:
            return all_images[buildroot_tag]
        except KeyError:
            self.log.error("Unable to determine buildroot image ID for %s",
                           buildroot_tag)
            return buildroot_tag

    def get_buildroot(self, build_id):
        """
        Build the buildroot entry of the metadata.

        :return: dict, partial metadata
        """

        docker_info = self.tasker.get_info()
        host_arch, docker_version = get_docker_architecture(self.tasker)

        buildroot = {
            'id': 1,
            'host': {
                'os': docker_info['OperatingSystem'],
                'arch': host_arch,
            },
            'content_generator': {
                'name': PROG,
                'version': atomic_reactor_version,
            },
            'container': {
                'type': 'docker',
                'arch': os.uname()[4],
            },
            'tools': [
                {
                    'name': tool['name'],
                    'version': tool['version'],
                }
                for tool in get_version_of_tools()] + [
                {
                    'name': 'docker',
                    'version': docker_version,
                },
            ],
            'components': self.get_rpms(),
            'extra': {
                'osbs': {
                    'build_id': build_id,
                    'builder_image_id': self.get_builder_image_id(),
                }
            },
        }

        return buildroot

    def get_logs(self):
        """
        Build the logs entry for the metadata 'output' section

        :return: list, Output instances
        """

        output = []

        # Collect logs from server
        try:
            logs = self.osbs.get_build_logs(self.build_id)
        except OsbsException as ex:
            self.log.error("unable to get build logs: %r", ex)
        else:
            # Deleted once closed
            logfile = NamedTemporaryFile(prefix=self.build_id,
                                         suffix=".log",
                                         mode='wb')
            try:
                logfile.write(logs)
            except (TypeError, UnicodeEncodeError):
                # Older osbs-client versions returned Unicode objects
                logfile.write(logs.encode('utf-8'))
            logfile.flush()
            metadata = self.get_output_metadata(logfile.name,
                                                "openshift-final.log")
            output.append(Output(file=logfile, metadata=metadata))

        docker_logs = NamedTemporaryFile(prefix="docker-%s" % self.build_id,
                                         suffix=".log",
                                         mode='wb')
        docker_logs.write("\n".join(self.workflow.build_result.logs).encode('utf-8'))
        docker_logs.flush()
        output.append(Output(file=docker_logs,
                             metadata=self.get_output_metadata(docker_logs.name,
                                                               "build.log")))
        return output

    def get_image_components(self):
        """
        Re-package the output of the rpmqa plugin into the format required
        for the metadata.
        """

        try:
            output = self.workflow.postbuild_results[PostBuildRPMqaPlugin.key]
        except KeyError:
            self.log.error("%s plugin did not run!",
                           PostBuildRPMqaPlugin.key)
            return []

        try:
            sep = PostBuildRPMqaPlugin.sep
        except AttributeError:
            # the sep class attribute was added in Aug 2016
            sep = ','

        return self.parse_rpm_output(output, PostBuildRPMqaPlugin.rpm_tags,
                                     separator=sep)

    def get_image_output(self, arch):
        """
        Create the output for the image

        This is the Koji Content Generator metadata, along with the
        'docker save' output to upload.

        :param arch: str, architecture for this output
        :return: tuple, (metadata dict, Output instance)

        """

        image_id = self.workflow.builder.image_id
        saved_image = self.workflow.exported_image_sequence[-1].get('path')
        ext = saved_image.split('.', 1)[1]
        name_fmt = 'docker-image-{id}.{arch}.{ext}'
        image_name = name_fmt.format(id=image_id, arch=arch, ext=ext)
        metadata = self.get_output_metadata(saved_image, image_name)
        output = Output(file=open(saved_image, 'rb'), metadata=metadata)

        return metadata, output

    def get_digests(self):
        """
        Returns a map of repositories to digests
        """

        digests = {}  # repository -> digest
        for registry in self.workflow.push_conf.docker_registries:
            for image in self.workflow.tag_conf.images:
                image_str = image.to_str()
                if image_str in registry.digests:
                    digest = self.select_digest(registry.digests[image_str])
                    digests[image.to_str(registry=False)] = digest

        return digests

    def select_digest(self, digests):
        digest = digests.default

        # pulp/crane supports only manifest schema v1
        if self.prefer_schema1_digest:
            if self.workflow.push_conf.pulp_registries:
                self.log.info('Using schema v1 digest because of older Pulp integration')
                digest = digests.v1
            else:
                self.log.info('Schema v1 preferred, but not used')

        return digest

    def get_repositories(self, digests):
        """
        Build the repositories metadata

        :param digests: dict, repository -> digest
        """
        if self.workflow.push_conf.pulp_registries:
            # If pulp was used, only report pulp images
            registries = self.workflow.push_conf.pulp_registries
        else:
            # Otherwise report all the images we pushed
            registries = self.workflow.push_conf.all_registries

        output_images = []
        for registry in registries:
            image = self.pullspec_image.copy()
            image.registry = registry.uri
            pullspec = image.to_str()

            output_images.append(pullspec)

            digest = digests.get(image.to_str(registry=False))
            if digest:
                digest_pullspec = image.to_str(tag=False) + "@" + digest
                output_images.append(digest_pullspec)

        return output_images

    def get_output(self, buildroot_id):
        """
        Build the 'output' section of the metadata.

        :return: list, Output instances
        """

        def add_buildroot_id(output):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output, arch):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': arch})
            return Output(file=logfile, metadata=metadata)

        arch = os.uname()[4]
        output_files = [add_log_type(add_buildroot_id(metadata), arch)
                        for metadata in self.get_logs()]

        # Parent of squashed built image is base image
        image_id = self.workflow.builder.image_id
        parent_id = self.workflow.base_image_inspect['Id']

        # Read config from the registry using v2 schema 2 digest
        registries = self.workflow.push_conf.docker_registries
        if registries:
            config = copy.deepcopy(registries[0].config)
        else:
            config = {}

        # We don't need container_config section
        if config and 'container_config' in config:
            del config['container_config']

        digests = self.get_digests()
        repositories = self.get_repositories(digests)
        tags = set(image.tag for image in self.workflow.tag_conf.images)
        metadata, output = self.get_image_output(arch)
        metadata.update({
            'arch': arch,
            'type': 'docker-image',
            'components': self.get_image_components(),
            'extra': {
                'image': {
                    'arch': arch,
                },
                'docker': {
                    'id': image_id,
                    'parent_id': parent_id,
                    'repositories': repositories,
                    'tags': list(tags),
                    'config': config
                },
            },
        })

        if not config:
            del metadata['extra']['docker']['config']

        # Add the 'docker save' image to the output
        image = add_buildroot_id(output)
        output_files.append(image)

        return output_files

    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        for image in self.workflow.tag_conf.unique_images:
            self.pullspec_image = image
            break

        for image in self.workflow.tag_conf.primary_images:
            # dash at first/last position does not count
            if '-' in image.tag[1:-1]:
                self.pullspec_image = image
                break

        if not self.pullspec_image:
            raise RuntimeError('Unable to determine pullspec_image')

        metadata_version = 0

        buildroot = self.get_buildroot(build_id=self.build_id)
        output_files = self.get_output(buildroot['id'])

        koji_metadata = {
            'metadata_version': metadata_version,
            'buildroots': [buildroot],
            'output': [output.metadata for output in output_files],
        }

        return koji_metadata, output_files

    def upload_file(self, session, output, serverdir):
        """
        Upload a file to koji

        :return: str, pathname on server
        """
        name = output.metadata['filename']
        self.log.debug("uploading %r to %r as %r",
                       output.file.name, serverdir, name)

        kwargs = {}
        if self.blocksize is not None:
            kwargs['blocksize'] = self.blocksize
            self.log.debug("using blocksize %d", self.blocksize)

        upload_logger = KojiUploadLogger(self.log)
        session.uploadWrapper(output.file.name, serverdir, name=name,
                              callback=upload_logger.callback, **kwargs)
        path = os.path.join(serverdir, name)
        self.log.debug("uploaded %r", path)
        return path

    def login(self):
        """
        Log in to koji

        :return: koji.ClientSession instance, logged in
        """

        # krbV python library throws an error if these are unicode
        auth_info = {
            "proxyuser": self.koji_proxy_user,
            "ssl_certs_dir": self.koji_ssl_certs_dir,
            "krb_principal": str(self.koji_principal),
            "krb_keytab": str(self.koji_keytab)
        }
        return create_koji_session(str(self.kojihub), auth_info)

    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab) or
                (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        koji_metadata, output_files = self.get_metadata()

        try:
            session = self.login()
            for output in output_files:
                if output.file:
                    self.upload_file(session, output, self.koji_upload_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        md_fragment = "{}-md".format(get_build_json()['metadata']['name'])
        md_fragment_key = 'metadata.json'
        cm_data = {md_fragment_key: koji_metadata}
        annotations = {
            "metadata_fragment": "configmap/" + md_fragment,
            "metadata_fragment_key": md_fragment_key
        }

        try:
            self.osbs.create_config_map(md_fragment, cm_data)
        except OsbsException:
            self.log.debug("metadata: %r", koji_metadata)
            self.log.debug("annotations: %r", annotations)
            raise

        return annotations
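
    # For a hypothetical build named 'my-build-1', run() creates a
    # ConfigMap 'my-build-1-md' holding the Koji metadata under the key
    # 'metadata.json' and returns:
    #   {"metadata_fragment": "configmap/my-build-1-md",
    #    "metadata_fragment_key": "metadata.json"}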
Example #4
class MockCreator(object):
    def __init__(self):
        parser = argparse.ArgumentParser(
            description="osbs test harness mock JSON creator")

        parser.add_argument(
            "user",
            action='store',
            help="name of user to use for Basic Authentication in OSBS")
        parser.add_argument("--config",
                            action='store',
                            metavar="PATH",
                            help="path to configuration file",
                            default=DEFAULT_CONFIGURATION_FILE)
        parser.add_argument(
            "--instance",
            "-i",
            action='store',
            metavar="SECTION_NAME",
            help="section within config for requested instance",
            default="stage")
        parser.add_argument(
            "--password",
            action='store',
            help="password to use for Basic Authentication in OSBS")
        parser.add_argument("--mock-dir",
                            metavar="DIR",
                            action="store",
                            default=DEFAULT_DIR,
                            help="mock JSON responses are stored in DIR")
        parser.add_argument(
            "--imagestream",
            metavar="IMAGESTREAM",
            action="store",
            default=DEFAULT_IMAGESTREAM_FILE,
            help="Image name for image stream import. Defaults to " +
            DEFAULT_IMAGESTREAM_FILE)
        parser.add_argument(
            "--image_server",
            metavar="IMAGESERVER",
            action="store",
            default=DEFAULT_IMAGESTREAM_SERVER,
            help="Server for image stream import. Defaults to " +
            DEFAULT_IMAGESTREAM_SERVER)
        parser.add_argument(
            "--image_tags",
            metavar="IMAGETAGS",
            action="store",
            nargs=3,
            default=DEFAULT_IMAGESTREAM_TAGS,
            help="Image stream tags as 3 space separated values.")
        parser.add_argument("--os-version",
                            metavar="OS_VER",
                            action="store",
                            default=OS_VERSION,
                            help="OpenShift version of the mock JSONs")

        args = parser.parse_args()
        self.user = args.user
        self.password = args.password

        mock_path = args.mock_dir
        self.mock_dir = "/".join([mock_path, args.os_version])
        find_or_make_dir(self.mock_dir)

        self.capture_dir = tempfile.mkdtemp()

        args.git_url = "https://github.com/TomasTomecek/docker-hello-world.git"
        args.git_branch = "master"
        args.git_commit = "HEAD"

        os_conf = Configuration(conf_file=args.config,
                                conf_section=args.instance,
                                cli_args=args)
        build_conf = Configuration(conf_file=args.config,
                                   conf_section=args.instance,
                                   cli_args=args)

        set_logging(level=logging.INFO)

        self.osbs = OSBS(os_conf, build_conf)
        setup_json_capture(self.osbs, os_conf, self.capture_dir)

        self.imagestream_file = args.imagestream
        self.imagestream_server = args.image_server
        self.imagestream_tags = args.image_tags
        self.rh_pattern = re.compile(r"redhat\.com")
        self.ex_pattern = r"(\S+\.)*redhat\.com"

    def clean_data(self, out_data):
        if isinstance(out_data, dict):
            cleaned_data = {}
            for key, data in out_data.items():
                cleaned_data[key] = self.clean_data(data)
            return cleaned_data
        elif isinstance(out_data, list):
            cleaned_data = []
            for data in out_data:
                cleaned_data.append(self.clean_data(data))
            return cleaned_data
        elif isinstance(out_data, str):
            if re.search(self.rh_pattern, out_data):
                return re.sub(self.ex_pattern, "example.com", out_data)
            else:
                return out_data
        else:
            return out_data
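
    # Illustration with a hypothetical value: strings matching the
    # redhat.com pattern are rewritten, so
    #   clean_data({"host": "builder01.eng.redhat.com"})
    # returns {"host": "example.com"}; everything else passes through
    # unchanged.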

    def comp_write(self, out_name, out_data):
        cleaned_data = self.clean_data(out_data)
        out_path = "/".join([self.mock_dir, out_name])
        with open(out_path, "w") as outf:
            try:
                json.dump(cleaned_data, outf, indent=4)
            except (ValueError, TypeError):
                outf.write(json.dumps(cleaned_data, indent=4))

    def create_mock_builds_list(self):
        kwargs = {}
        # get build list
        self.osbs.list_builds(**kwargs)
        # find 'get-namespaces_osbs-stage_builds_-000.json' file and parse it into
        # 'builds_list.json', 'builds_list_empty.json', 'builds_list_one.json'
        all_builds = "get-namespaces_osbs-stage_builds_-000.json"
        all_builds_path = "/".join([self.capture_dir, all_builds])
        with open(all_builds_path, "r") as infile:
            builds_data = json.load(infile)
            builds_items = copy.copy(builds_data["items"])
            builds_data["items"] = []
            self.comp_write("builds_list_empty.json", builds_data)
            if not builds_items:
                return
            builds_data["items"].append(builds_items[0])
            self.comp_write("builds_list_one.json", builds_data)
            if len(builds_items) < 2:
                return
            builds_data["items"].append(builds_items[1])
            self.comp_write("builds_list.json", builds_data)
        os.remove(all_builds_path)

    def create_pods_list(self, build_id):
        self.osbs.get_pod_for_build(build_id)
        pods_pre = "get-namespaces_osbs-stage_pods_?labelSelector=openshift.io%2Fbuild.name%3D"
        for i in range(0, 4):
            try:
                pods_fname = pods_pre + build_id + "-00{}.json".format(i)
                pods_inpath = "/".join([self.capture_dir, pods_fname])
                os.stat(pods_inpath)
                break
            except OSError:
                continue

        with open(pods_inpath, "r") as infile:
            pods_data = json.load(infile)
            image = "buildroot:latest"
            pods_items = pods_data["items"] or []
            for pod in pods_items:
                pod_containers = pod["status"]["containerStatuses"] or []
                for container in pod_containers:
                    container["imageID"] = "docker-pullable://" + image
                    container["image"] = image
            self.comp_write("pods.json", pods_data)
        os.remove(pods_inpath)

    def create_mock_get_user(self):
        self.osbs.get_user()
        user_list = "get-users_~_-000.json"
        user_list_path = "/".join([self.capture_dir, user_list])
        with open(user_list_path, "r") as infile:
            user_data = json.load(infile)
            user_data["groups"] = []
            user_data["identities"] = None
            if "fullName" in user_data:
                del user_data["fullName"]
            user_data["metadata"]["name"] = "test"
            user_data["metadata"][
                "selfLink"] = "/apis/user.openshift.io/v1/users/test"
            self.comp_write("get_user.json", user_data)
        os.remove(user_list_path)

    def create_a_mock_build(self, func, build_name, out_tag, build_args):
        try:
            build = func(**build_args)
            build_id = build.get_build_name()
            build = self.osbs.wait_for_build_to_get_scheduled(build_id)
            self.osbs.watch_builds()
            build = self.osbs.wait_for_build_to_finish(build_id)
            self.osbs.get_build_logs(build_id)
        except (subprocess.CalledProcessError, OsbsException):
            pass

        watch_data = canonize_data(copy.deepcopy(build.json), build_name,
                                   "Complete")
        watch_obj = {"object": watch_data, "type": "MODIFIED"}
        out_name = "watch_build_test-" + out_tag + "build-123.json"
        self.comp_write(out_name, watch_obj)

        build_fname = "get-namespaces_osbs-stage_builds_" + build_id + "_-000.json"
        build_path = "/".join([self.capture_dir, build_fname])
        with open(build_path, "r") as infile:
            build_data = canonize_data(json.load(infile), build_name,
                                       "Complete")
            out_name = "build_test-" + out_tag + "build-123.json"
            self.comp_write(out_name, build_data)
        os.remove(build_path)
        return out_name

    def create_mock_build(self):
        build_kwargs = {
            'git_uri': self.osbs.build_conf.get_git_uri(),
            'git_ref': self.osbs.build_conf.get_git_ref(),
            'git_branch': self.osbs.build_conf.get_git_branch(),
            'user': self.osbs.build_conf.get_user(),
            'release': TEST_BUILD,
            'platform': "x86_64",
            'scratch': True,
        }

        build_name = self.create_a_mock_build(self.osbs.create_worker_build,
                                              TEST_BUILD, "", build_kwargs)
        build_path = "/".join([self.mock_dir, build_name])
        if os.path.exists(build_path):
            with open(build_path, "r") as infile:
                build_data = json.load(infile)
                build_data["kind"] = "BuildConfig"
                self.comp_write(
                    "created_build_config_test-build-config-123.json",
                    build_data)

        del build_kwargs['platform']

        self.create_a_mock_build(self.osbs.create_orchestrator_build,
                                 TEST_ORCHESTRATOR_BUILD, "orchestrator-",
                                 build_kwargs)

    def create_mock_build_other(self):
        build_kwargs = {
            'git_uri': self.osbs.build_conf.get_git_uri(),
            'git_ref': self.osbs.build_conf.get_git_ref(),
            'git_branch': self.osbs.build_conf.get_git_branch(),
            'user': self.osbs.build_conf.get_user(),
            'release': TEST_BUILD,
            'platform': "x86_64",
            'scratch': True,
        }
        build_id = ""
        try:
            build = self.osbs.create_worker_build(**build_kwargs)
            build_id = build.get_build_name()
            self.osbs.wait_for_build_to_get_scheduled(build_id)
            self.create_pods_list(build_id)
            self.osbs.cancel_build(build_id)
            self.osbs.wait_for_build_to_finish(build_id)
            self.osbs.get_build_logs(build_id)
        except OsbsException:
            self.create_pods_list(build_id)

        instant_fname = "post-namespaces_osbs-stage_builds_-000.json"
        instant_path = "/".join([self.capture_dir, instant_fname])
        with open(instant_path, "r") as infile:
            instant_data = canonize_data(json.load(infile))
            self.comp_write("instantiated_test-build-config-123.json",
                            instant_data)
        os.remove(instant_path)

        cancel_args = [
            {
                "suffix": "_-000-001.json",
                "version": "get",
                "phase": None
            },
            {
                "suffix": "_-000-000.json",
                "version": "put",
                "phase": "Cancelled"
            },
        ]
        for data in cancel_args:
            build_fname = "get-watch_namespaces_osbs-stage_builds_" + build_id + data[
                "suffix"]
            build_path = "/".join([self.capture_dir, build_fname])
            with open(build_path, "r") as infile:
                cancel_obj = json.load(infile)
                cancel_data = canonize_data(
                    copy.deepcopy(cancel_obj["object"]), TEST_CANCELLED_BUILD,
                    data["phase"])
                self.comp_write(
                    "build_test-build-cancel-123_" + data["version"] + ".json",
                    cancel_data)
            os.remove(build_path)

    def create_mock_static_files(self):
        # these aren't JSON, so just write them out
        out_path = "/".join(
            [self.mock_dir, "build_test-orchestrator-build-123_logs.txt"])
        with open(out_path, "w") as outf:
            outf.write(ORCH_BUILD_LOG)
        out_path = "/".join([self.mock_dir, "build_test-build-123_logs.txt"])
        with open(out_path, "wb") as outf:
            outf.write(BASE_BUILD_LOG)

        self.comp_write("create_config_map.json", MOCK_CONFIG_MAP)
Example #5
class KojiPromotePlugin(ExitPlugin):
    """
    Promote this build to Koji

    Submits a successful build to Koji using the Content Generator API,
    https://fedoraproject.org/wiki/Koji/ContentGenerators

    Authentication is with Kerberos unless the koji_ssl_certs
    configuration parameter is given, in which case it should be a
    path at which 'cert', 'ca', and 'serverca' are the certificates
    for SSL authentication.

    If Kerberos is used for authentication, the default principal will
    be used (from the kernel keyring) unless both koji_keytab and
    koji_principal are specified. The koji_keytab parameter is a
    keytab name like 'type:name', and so can be used to specify a key
    in a Kubernetes secret by specifying 'FILE:/path/to/key'.

    If metadata_only is set, the v1 image will not be uploaded, only
    the logs. The import will be marked as metadata-only.

    Runs as an exit plugin in order to capture logs from all other
    plugins.
    """

    key = "koji_promote"
    is_allowed_to_fail = False

    def __init__(self,
                 tasker,
                 workflow,
                 kojihub,
                 url,
                 verify_ssl=True,
                 use_auth=True,
                 koji_ssl_certs=None,
                 koji_proxy_user=None,
                 koji_principal=None,
                 koji_keytab=None,
                 metadata_only=False):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the v1 image
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user
        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab
        self.metadata_only = metadata_only

        osbs_conf = Configuration(conf_file=None,
                                  openshift_uri=url,
                                  use_auth=use_auth,
                                  verify_ssl=verify_ssl)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.namespace = None

    @staticmethod
    def parse_rpm_output(output, tags, separator=';'):
        """
        Parse output of the rpm query.

        :param output: list, decoded output (str) from the rpm subprocess
        :param tags: list, str fields used for query output
        :return: list, dicts describing each rpm package
        """
        def field(tag):
            """
            Get a field value by name
            """
            try:
                value = fields[tags.index(tag)]
            except ValueError:
                return None

            if value == '(none)':
                return None

            return value

        components = []
        sigmarker = 'Key ID '
        for rpm in output:
            fields = rpm.rstrip('\n').split(separator)
            if len(fields) < len(tags):
                continue

            signature = field('SIGPGP:pgpsig') or field('SIGGPG:pgpsig')
            if signature:
                parts = signature.split(sigmarker, 1)
                if len(parts) > 1:
                    signature = parts[1]

            component_rpm = {
                'type': 'rpm',
                'name': field('NAME'),
                'version': field('VERSION'),
                'release': field('RELEASE'),
                'arch': field('ARCH'),
                'sigmd5': field('SIGMD5'),
                'signature': signature,
            }

            # Special handling for epoch as it must be an integer or None
            epoch = field('EPOCH')
            if epoch is not None:
                epoch = int(epoch)

            component_rpm['epoch'] = epoch

            if component_rpm['name'] != 'gpg-pubkey':
                components.append(component_rpm)

        return components
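
    # Illustration with a hypothetical query line (fields in the order
    # used by get_rpms(), ';'-separated):
    #   bash;4.2.46;31.el7;x86_64;(none);d41d8cd98f00b204e9800998ecf8427e;
    #       RSA/SHA256, Tue 01 Mar 2016, Key ID 199e2f91fd431d51;(none)
    # parses to:
    #   {'type': 'rpm', 'name': 'bash', 'version': '4.2.46',
    #    'release': '31.el7', 'arch': 'x86_64', 'epoch': None,
    #    'sigmd5': 'd41d8cd98f00b204e9800998ecf8427e',
    #    'signature': '199e2f91fd431d51'}
    # Entries named 'gpg-pubkey' are excluded from the result.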

    def get_rpms(self):
        """
        Build a list of installed RPMs in the format required for the
        metadata.
        """

        tags = [
            'NAME',
            'VERSION',
            'RELEASE',
            'ARCH',
            'EPOCH',
            'SIGMD5',
            'SIGPGP:pgpsig',
            'SIGGPG:pgpsig',
        ]

        sep = ';'
        fmt = sep.join(["%%{%s}" % tag for tag in tags])
        cmd = "/bin/rpm -qa --qf '{0}\n'".format(fmt)
        try:
            # py3
            (status, output) = subprocess.getstatusoutput(cmd)
        except AttributeError:
            # py2
            with open('/dev/null', 'r+') as devnull:
                p = subprocess.Popen(cmd,
                                     shell=True,
                                     stdin=devnull,
                                     stdout=subprocess.PIPE,
                                     stderr=devnull)

                (stdout, stderr) = p.communicate()
                status = p.wait()
                output = stdout.decode()

        if status != 0:
            self.log.debug("%s: stderr output: %s", cmd, stderr)
            raise RuntimeError("%s: exit code %s" % (cmd, status))

        return self.parse_rpm_output(output.splitlines(), tags, separator=sep)
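
    # For the tags above, the generated query command expands to (one line):
    #   /bin/rpm -qa --qf '%{NAME};%{VERSION};%{RELEASE};%{ARCH};%{EPOCH};
    #       %{SIGMD5};%{SIGPGP:pgpsig};%{SIGGPG:pgpsig}\n'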

    def get_output_metadata(self, path, filename):
        """
        Describe a file by its metadata.

        :return: dict
        """

        checksums = get_checksums(path, ['md5'])
        metadata = {
            'filename': filename,
            'filesize': os.path.getsize(path),
            'checksum': checksums['md5sum'],
            'checksum_type': 'md5'
        }

        if self.metadata_only:
            metadata['metadata_only'] = True

        return metadata
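
    # Illustration with hypothetical values:
    #   get_output_metadata('/tmp/build.log', 'build.log')
    # might return:
    #   {'filename': 'build.log',
    #    'filesize': 1024,
    #    'checksum': 'd41d8cd98f00b204e9800998ecf8427e',
    #    'checksum_type': 'md5'}
    # with 'metadata_only': True added when self.metadata_only is set.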

    def get_builder_image_id(self):
        """
        Find out the docker ID of the buildroot image we are in.
        """

        buildroot_tag = os.environ["OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE"]
        kwargs = {}
        if self.namespace is not None:
            kwargs['namespace'] = self.namespace
        pod = self.osbs.get_pod_for_build(self.build_id, **kwargs)
        all_images = pod.get_container_image_ids()

        try:
            return all_images[buildroot_tag]
        except KeyError:
            self.log.error("Unable to determine buildroot image ID for %s",
                           buildroot_tag)
            return buildroot_tag

    def get_buildroot(self, build_id):
        """
        Build the buildroot entry of the metadata.

        :return: dict, partial metadata
        """

        docker_version = self.tasker.get_version()
        docker_info = self.tasker.get_info()
        host_arch = docker_version['Arch']
        if host_arch == 'amd64':
            host_arch = 'x86_64'

        buildroot = {
            'id': 1,
            'host': {
                'os': docker_info['OperatingSystem'],
                'arch': host_arch,
            },
            'content_generator': {
                'name': PROG,
                'version': atomic_reactor_version,
            },
            'container': {
                'type': 'docker',
                'arch': os.uname()[4],
            },
            'tools': [{
                'name': tool['name'],
                'version': tool['version'],
            } for tool in get_version_of_tools()] + [
                {
                    'name': 'docker',
                    'version': docker_version['Version'],
                },
            ],
            'components': self.get_rpms(),
            'extra': {
                'osbs': {
                    'build_id': build_id,
                    'builder_image_id': self.get_builder_image_id(),
                }
            },
        }

        return buildroot

    def get_logs(self):
        """
        Build the logs entry for the metadata 'output' section

        :return: list, Output instances
        """

        # Collect logs from server
        kwargs = {}
        if self.namespace is not None:
            kwargs['namespace'] = self.namespace
        logs = self.osbs.get_build_logs(self.build_id, **kwargs)

        # Deleted once closed
        logfile = NamedTemporaryFile(prefix=self.build_id,
                                     suffix=".log",
                                     mode='w')
        logfile.write(logs)
        logfile.flush()

        docker_logs = NamedTemporaryFile(prefix="docker-%s" % self.build_id,
                                         suffix=".log",
                                         mode='w')
        docker_logs.write("\n".join(self.workflow.build_logs))
        docker_logs.flush()

        return [
            Output(file=docker_logs,
                   metadata=self.get_output_metadata(docker_logs.name,
                                                     "build.log")),
            Output(file=logfile,
                   metadata=self.get_output_metadata(logfile.name,
                                                     "openshift-final.log"))
        ]

    def get_image_components(self):
        """
        Re-package the output of the rpmqa plugin into the format required
        for the metadata.
        """

        try:
            output = self.workflow.postbuild_results[PostBuildRPMqaPlugin.key]
        except KeyError:
            self.log.error("%s plugin did not run!", PostBuildRPMqaPlugin.key)
            return []

        return self.parse_rpm_output(output,
                                     PostBuildRPMqaPlugin.rpm_tags,
                                     separator=',')

    def get_image_output(self):
        """
        Create the output for the image

        For v1, this is the v1 image. For v2, this is the v2 metadata
        with the checksum of an empty file, and no actual upload.

        :return: tuple, (metadata dict, Output instance)
        """

        image_id = self.workflow.builder.image_id
        v1_image = self.workflow.exported_image_sequence[-1].get('path')
        ext = v1_image.split('.', 1)[1]
        if self.metadata_only:
            v2_image_name = 'docker-v2-image-{0}.{1}'.format(image_id, ext)
            metadata = self.get_output_metadata(os.path.devnull, v2_image_name)
            output = Output(file=None, metadata=metadata)
        else:
            v1_image_name = 'docker-v1-image-{0}.{1}'.format(image_id, ext)
            metadata = self.get_output_metadata(v1_image, v1_image_name)
            output = Output(file=open(v1_image), metadata=metadata)

        return metadata, output

    def get_output_images(self):
        if self.workflow.push_conf.pulp_registries:
            # If pulp was used, only report pulp images
            registries = self.workflow.push_conf.pulp_registries
        else:
            # Otherwise report all the images we pushed
            registries = self.workflow.push_conf.all_registries

        output_images = []
        for registry in registries:
            for image in (self.workflow.tag_conf.primary_images +
                          self.workflow.tag_conf.unique_images):
                registry_image = image.copy()
                registry_image.registry = registry.uri
                if registry_image not in output_images:
                    output_images.append(registry_image)

        return output_images

    def get_output(self, buildroot_id):
        """
        Build the 'output' section of the metadata.

        :return: list, Output instances
        """
        def add_buildroot_id(output):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        output_files = [
            add_log_type(add_buildroot_id(metadata))
            for metadata in self.get_logs()
        ]

        # Parent of squashed built image is base image
        image_id = self.workflow.builder.image_id
        parent_id = self.workflow.base_image_inspect['Id']
        output_images = self.get_output_images()
        repositories = [
            image.to_str() for image in output_images if image.tag != 'latest'
        ]
        arch = os.uname()[4]
        metadata, output = self.get_image_output()
        metadata.update({
            'arch': arch,
            'type': 'docker-image',
            'components': self.get_image_components(),
            'extra': {
                'image': {
                    'arch': arch,
                },
                'docker': {
                    'id': image_id,
                    'parent_id': parent_id,
                    'repositories': repositories,
                },
            },
        })

        # Add the v1 image (or v2 metadata) to the output
        image = add_buildroot_id(output)
        output_files.append(image)

        return output_files

    def get_build(self, metadata):
        build_start_time = metadata["creationTimestamp"]
        try:
            # Decode UTC RFC3339 date with no fractional seconds
            # (the format we expect)
            start_time_struct = time.strptime(build_start_time,
                                              '%Y-%m-%dT%H:%M:%SZ')
            start_time = int(time.mktime(start_time_struct))
        except ValueError:
            self.log.error("Invalid time format (%s)", build_start_time)
            raise

        name = None
        version = None
        release = None
        for image_name in self.workflow.tag_conf.primary_images:
            if '-' in image_name.tag:
                name = image_name.repo
                version, release = image_name.tag.split('-', 1)

        if name is None or version is None or release is None:
            raise RuntimeError('Unable to determine name-version-release')

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        build = {
            'name': name,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': {
                'image': {},
            },
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build
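
    # Illustration with a hypothetical primary image: a tag of the form
    # '<version>-<release>' determines the NVR, so 'fedora:25-1' yields
    # name='fedora', version='25', release='1'; 'source' is built from the
    # git source, e.g. 'git://example.com/repo.git#abc123'.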

    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. "
                           "Probably not running in build container.")
            raise

        try:
            metadata = build_json["metadata"]
            self.build_id = metadata["name"]
            self.namespace = metadata.get("namespace")
        except KeyError:
            self.log.error("No build metadata")
            raise

        metadata_version = 0

        build = self.get_build(metadata)
        buildroot = self.get_buildroot(build_id=self.build_id)
        output_files = self.get_output(buildroot['id'])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': [buildroot],
            'output': [output.metadata for output in output_files],
        }

        return koji_metadata, output_files

    def upload_file(self, session, output, serverdir):
        """
        Upload a file to koji

        :return: str, pathname on server
        """
        name = output.metadata['filename']
        self.log.debug("uploading %r to %r as %r", output.file.name, serverdir,
                       name)
        session.uploadWrapper(output.file.name, serverdir, name=name)
        path = os.path.join(serverdir, name)
        self.log.debug("uploaded %r", path)
        return path

    @staticmethod
    def get_upload_server_dir():
        """
        Create a path name for uploading files to

        :return: str, path name expected to be unique
        """
        dir_prefix = 'koji-promote'
        random_chars = ''.join(
            [random.choice(ascii_letters) for _ in range(8)])
        unique_fragment = '%r.%s' % (time.time(), random_chars)
        return os.path.join(dir_prefix, unique_fragment)
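
    # Example of a generated path (timestamp and random suffix vary):
    #   koji-promote/1469001234.56.aBcDeFgH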

    def login(self):
        """
        Log in to koji

        :return: koji.ClientSession instance, logged in
        """
        session = koji.ClientSession(self.kojihub)
        kwargs = {}
        if self.koji_proxy_user:
            kwargs['proxyuser'] = self.koji_proxy_user

        if self.koji_ssl_certs:
            # Use certificates
            self.log.info("Using SSL certificates for Koji authentication")
            session.ssl_login(os.path.join(self.koji_ssl_certs, 'cert'),
                              os.path.join(self.koji_ssl_certs, 'ca'),
                              os.path.join(self.koji_ssl_certs, 'serverca'),
                              **kwargs)
        else:
            # Use Kerberos
            self.log.info("Using Kerberos for Koji authentication")
            if self.koji_principal and self.koji_keytab:
                kwargs['principal'] = self.koji_principal
                kwargs['keytab'] = self.koji_keytab

            session.krb_login(**kwargs)

        return session

    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab)
                or (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        if not is_rebuild(self.workflow):
            self.log.info("Not promoting to koji: not a rebuild")
            return

        koji_metadata, output_files = self.get_metadata()

        try:
            session = self.login()
            server_dir = self.get_upload_server_dir()
            for output in output_files:
                if output.file:
                    self.upload_file(session, output, server_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        session.CGImport(koji_metadata, server_dir)

        self.log.debug("Submitted with metadata: %s",
                       json.dumps(koji_metadata, sort_keys=True, indent=4))
Example #6
class KojiPromotePlugin(ExitPlugin):
    """
    Promote this build to Koji

    Submits a successful build to Koji using the Content Generator API,
    https://fedoraproject.org/wiki/Koji/ContentGenerators

    Authentication is with Kerberos unless the koji_ssl_certs
    configuration parameter is given, in which case it should be a
    path at which 'cert', 'ca', and 'serverca' are the certificates
    for SSL authentication.

    If Kerberos is used for authentication, the default principal will
    be used (from the kernel keyring) unless both koji_keytab and
    koji_principal are specified. The koji_keytab parameter is a
    keytab name like 'type:name', and so can be used to specify a key
    in a Kubernetes secret by specifying 'FILE:/path/to/key'.

    If metadata_only is set, the 'docker save' image will not be
    uploaded, only the logs. The import will be marked as
    metadata-only.

    Runs as an exit plugin in order to capture logs from all other
    plugins.
    """

    key = PLUGIN_KOJI_PROMOTE_PLUGIN_KEY
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow, kojihub, url,
                 verify_ssl=True, use_auth=True,
                 koji_ssl_certs=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 metadata_only=False, blocksize=None,
                 target=None, poll_interval=5):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the 'docker save' image
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user

        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab

        self.metadata_only = metadata_only
        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.namespace = get_build_json().get('metadata', {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                                  use_auth=use_auth, verify_ssl=verify_ssl,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.pullspec_image = None

    def get_rpms(self):
        """
        Build a list of installed RPMs in the format required for the
        metadata.
        """

        tags = [
            'NAME',
            'VERSION',
            'RELEASE',
            'ARCH',
            'EPOCH',
            'SIGMD5',
            'SIGPGP:pgpsig',
            'SIGGPG:pgpsig',
        ]

        cmd = "/bin/rpm " + rpm_qf_args(tags)
        try:
            # py3
            (status, output) = subprocess.getstatusoutput(cmd)
        except AttributeError:
            # py2
            with open('/dev/null', 'r+') as devnull:
                p = subprocess.Popen(cmd,
                                     shell=True,
                                     stdin=devnull,
                                     stdout=subprocess.PIPE,
                                     stderr=devnull)

                (stdout, stderr) = p.communicate()
                status = p.wait()
                output = stdout.decode()

        if status != 0:
            self.log.debug("%s: stderr output: %s", cmd, stderr)
            raise RuntimeError("%s: exit code %s" % (cmd, status))

        return parse_rpm_output(output.splitlines(), tags)

    def get_output_metadata(self, path, filename):
        """
        Describe a file by its metadata.

        :return: dict
        """

        checksums = get_checksums(path, ['md5'])
        metadata = {'filename': filename,
                    'filesize': os.path.getsize(path),
                    'checksum': checksums['md5sum'],
                    'checksum_type': 'md5'}

        if self.metadata_only:
            metadata['metadata_only'] = True

        return metadata

    def get_builder_image_id(self):
        """
        Find out the docker ID of the buildroot image we are in.
        """

        try:
            buildroot_tag = os.environ["OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE"]
        except KeyError:
            return ''

        try:
            pod = self.osbs.get_pod_for_build(self.build_id)
            all_images = pod.get_container_image_ids()
        except OsbsException as ex:
            self.log.error("unable to find image id: %r", ex)
            return buildroot_tag

        try:
            return all_images[buildroot_tag]
        except KeyError:
            self.log.error("Unable to determine buildroot image ID for %s",
                           buildroot_tag)
            return buildroot_tag

    def get_buildroot(self, build_id):
        """
        Build the buildroot entry of the metadata.

        :return: dict, partial metadata
        """

        docker_info = self.tasker.get_info()
        host_arch, docker_version = get_docker_architecture(self.tasker)

        buildroot = {
            'id': 1,
            'host': {
                'os': docker_info['OperatingSystem'],
                'arch': host_arch,
            },
            'content_generator': {
                'name': PROG,
                'version': atomic_reactor_version,
            },
            'container': {
                'type': 'docker',
                'arch': os.uname()[4],
            },
            'tools': [
                {
                    'name': tool['name'],
                    'version': tool['version'],
                }
                for tool in get_version_of_tools()] + [
                {
                    'name': 'docker',
                    'version': docker_version,
                },
            ],
            'components': self.get_rpms(),
            'extra': {
                'osbs': {
                    'build_id': build_id,
                    'builder_image_id': self.get_builder_image_id(),
                }
            },
        }

        return buildroot

    def get_logs(self):
        """
        Build the logs entry for the metadata 'output' section

        :return: list, Output instances
        """

        output = []

        # Collect logs from server
        try:
            logs = self.osbs.get_build_logs(self.build_id)
        except OsbsException as ex:
            self.log.error("unable to get build logs: %r", ex)
        else:
            # Deleted once closed
            logfile = NamedTemporaryFile(prefix=self.build_id,
                                         suffix=".log",
                                         mode='wb')
            try:
                logfile.write(logs)
            except (TypeError, UnicodeEncodeError):
                # Older osbs-client versions returned Unicode objects
                logfile.write(logs.encode('utf-8'))
            logfile.flush()
            metadata = self.get_output_metadata(logfile.name,
                                                "openshift-final.log")
            output.append(Output(file=logfile, metadata=metadata))

        docker_logs = NamedTemporaryFile(prefix="docker-%s" % self.build_id,
                                         suffix=".log",
                                         mode='wb')
        docker_logs.write("\n".join(self.workflow.build_result.logs).encode('utf-8'))
        docker_logs.flush()
        output.append(Output(file=docker_logs,
                             metadata=self.get_output_metadata(docker_logs.name,
                                                               "build.log")))
        return output

    def get_image_components(self):
        """
        Re-package the output of the rpmqa plugin into the format required
        for the metadata.
        """

        output = self.workflow.image_components
        if output is None:
            self.log.error("%s plugin did not run!",
                           PostBuildRPMqaPlugin.key)
            output = []

        return output

    def get_image_output(self, arch):
        """
        Create the output for the image

        This is the Koji Content Generator metadata, along with the
        'docker save' output to upload.

        For metadata-only builds, an empty file is used instead of the
        output of 'docker save'.

        :param arch: str, architecture for this output
        :return: tuple, (metadata dict, Output instance)

        """

        saved_image = self.workflow.exported_image_sequence[-1].get('path')
        image_name = get_image_upload_filename(self.workflow.exported_image_sequence[-1],
                                               self.workflow.builder.image_id,
                                               arch)
        if self.metadata_only:
            metadata = self.get_output_metadata(os.path.devnull, image_name)
            output = Output(file=None, metadata=metadata)
        else:
            metadata = self.get_output_metadata(saved_image, image_name)
            output = Output(file=open(saved_image), metadata=metadata)

        return metadata, output

    def get_digests(self):
        """
        Returns a map of images to their digests
        """

        try:
            pulp = get_manifests_in_pulp_repository(self.workflow)
        except KeyError:
            pulp = None

        digests = {}  # repository -> digests
        for registry in self.workflow.push_conf.docker_registries:
            for image in self.workflow.tag_conf.images:
                image_str = image.to_str()
                if image_str in registry.digests:
                    image_digests = registry.digests[image_str]
                    if pulp is None:
                        digest_list = [image_digests.default]
                    else:
                        # If Pulp is enabled, only report digests that
                        # were synced into Pulp. This may not be all
                        # of them, depending on whether Pulp has
                        # schema 2 support.
                        digest_list = [digest for digest in (image_digests.v1,
                                                             image_digests.v2)
                                       if digest in pulp]

                    digests[image.to_str(registry=False)] = digest_list

        return digests
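
    # Illustration with hypothetical values: with Pulp enabled and only the
    # schema 1 manifest synced into the Pulp repository, each image maps to
    # the digests actually available there, e.g.:
    #   {'fedora:25': ['sha256:1111...']}
    # Without Pulp, each image maps to [image_digests.default].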

    def get_repositories(self, digests):
        """
        Build the repositories metadata

        :param digests: dict, image -> digests
        """
        if self.workflow.push_conf.pulp_registries:
            # If pulp was used, only report pulp images
            registries = self.workflow.push_conf.pulp_registries
        else:
            # Otherwise report all the images we pushed
            registries = self.workflow.push_conf.all_registries

        output_images = []
        for registry in registries:
            image = self.pullspec_image.copy()
            image.registry = registry.uri
            pullspec = image.to_str()

            output_images.append(pullspec)

            digest_list = digests.get(image.to_str(registry=False), ())
            for digest in digest_list:
                digest_pullspec = image.to_str(tag=False) + "@" + digest
                output_images.append(digest_pullspec)

        return output_images

    def get_output(self, buildroot_id):
        """
        Build the 'output' section of the metadata.

        :return: list, Output instances
        """

        def add_buildroot_id(output):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        output_files = [add_log_type(add_buildroot_id(metadata))
                        for metadata in self.get_logs()]

        # Parent of squashed built image is base image
        image_id = self.workflow.builder.image_id
        parent_id = self.workflow.base_image_inspect['Id']

        # Read config from the registry using v2 schema 2 digest
        registries = self.workflow.push_conf.docker_registries
        if registries:
            config = copy.deepcopy(registries[0].config)
        else:
            config = {}

        # We don't need container_config section
        if config and 'container_config' in config:
            del config['container_config']

        digests = self.get_digests()
        repositories = self.get_repositories(digests)
        arch = os.uname()[4]
        tags = set(image.tag for image in self.workflow.tag_conf.primary_images)
        metadata, output = self.get_image_output(arch)
        metadata.update({
            'arch': arch,
            'type': 'docker-image',
            'components': self.get_image_components(),
            'extra': {
                'image': {
                    'arch': arch,
                },
                'docker': {
                    'id': image_id,
                    'parent_id': parent_id,
                    'repositories': repositories,
                    'layer_sizes': self.workflow.layer_sizes,
                    'tags': list(tags),
                    'config': config
                },
            },
        })
        annotations = self.workflow.build_result.annotations
        if annotations and 'digests' in annotations:
            digests = get_digests_map_from_annotations(annotations['digests'])
            metadata['extra']['docker']['digests'] = digests

        if not config:
            del metadata['extra']['docker']['config']

        # Add the 'docker save' image to the output
        image = add_buildroot_id(output)
        output_files.append(image)

        return output_files

    def get_build(self, metadata):
        start_time = int(atomic_reactor_start_time)

        labels = df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels

        component = get_preferred_label(labels, 'com.redhat.component')
        version = get_preferred_label(labels, 'version')
        release = get_preferred_label(labels, 'release')

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {'image': {'autorebuild': is_rebuild(self.workflow)}}

        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = int(koji_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        # Append media_types from pulp pull
        pulp_pull_results = self.workflow.postbuild_results.get(PLUGIN_PULP_PULL_KEY)
        if pulp_pull_results:
            extra['image']['media_types'] = sorted(list(set(pulp_pull_results)))

        # Append parent_build_id from koji parent
        parent_results = self.workflow.prebuild_results.get(PLUGIN_KOJI_PARENT_KEY) or {}
        parent_id = parent_results.get('parent-image-koji-build', {}).get('id')
        if parent_id is not None:
            try:
                parent_id = int(parent_id)
            except ValueError:
                self.log.exception("invalid koji parent id %r", parent_id)
            else:
                extra['image']['parent_build_id'] = parent_id

        # Append isolated build flag
        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if isinstance(help_result, dict) and 'help_file' in help_result and 'status' in help_result:
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                extra['image']['help'] = None
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                extra['image']['help'] = help_result['help_file']
            else:
                self.log.error("Unknown result from add_help plugin: %s", help_result)

        flatpak_source_info = get_flatpak_source_info(self.workflow)
        if flatpak_source_info is not None:
            extra['image'].update(flatpak_source_info.koji_metadata())

        resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent': resolve_comp_result['signing_intent'],
                'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
            }

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build

    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        for image in self.workflow.tag_conf.unique_images:
            self.pullspec_image = image
            break

        for image in self.workflow.tag_conf.primary_images:
            # dash at first/last position does not count
            if '-' in image.tag[1:-1]:
                self.pullspec_image = image
                break

        if not self.pullspec_image:
            raise RuntimeError('Unable to determine pullspec_image')
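
        # To illustrate the dash heuristic above with hypothetical tags:
        # 'name-1.0-2' has interior dashes, so it is taken as the
        # name-version-release pullspec; 'latest' or '-edge' would not
        # qualify, since dashes at the very ends are ignored.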

        metadata_version = 0

        build = self.get_build(metadata)
        buildroot = self.get_buildroot(build_id=self.build_id)
        output_files = self.get_output(buildroot['id'])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': [buildroot],
            'output': [output.metadata for output in output_files],
        }

        return koji_metadata, output_files

    def upload_file(self, session, output, serverdir):
        """
        Upload a file to koji

        :return: str, pathname on server
        """
        name = output.metadata['filename']
        self.log.debug("uploading %r to %r as %r",
                       output.file.name, serverdir, name)

        kwargs = {}
        if self.blocksize is not None:
            kwargs['blocksize'] = self.blocksize
            self.log.debug("using blocksize %d", self.blocksize)

        upload_logger = KojiUploadLogger(self.log)
        session.uploadWrapper(output.file.name, serverdir, name=name,
                              callback=upload_logger.callback, **kwargs)
        path = os.path.join(serverdir, name)
        self.log.debug("uploaded %r", path)
        return path

    @staticmethod
    def get_upload_server_dir():
        """
        Create a path name for uploading files to

        :return: str, path name expected to be unique
        """
        dir_prefix = 'koji-promote'
        random_chars = ''.join([random.choice(ascii_letters)
                                for _ in range(8)])
        unique_fragment = '%r.%s' % (time.time(), random_chars)
        return os.path.join(dir_prefix, unique_fragment)
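
    # For illustration: a call might return something like
    # 'koji-promote/1514764800.123456.aBcDeFgH' (hypothetical values), i.e.
    # the repr of the current time plus eight random ASCII letters, making
    # collisions between concurrent uploads unlikely.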

    def login(self):
        """
        Log in to koji

        :return: koji.ClientSession instance, logged in
        """

        # krbV python library throws an error if these are unicode
        auth_info = {
            "proxyuser": self.koji_proxy_user,
            "ssl_certs_dir": self.koji_ssl_certs,
            "krb_principal": str(self.koji_principal),
            "krb_keytab": str(self.koji_keytab)
        }
        return create_koji_session(str(self.kojihub), auth_info)

    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab) or
                (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        koji_metadata, output_files = self.get_metadata()

        try:
            session = self.login()
            server_dir = self.get_upload_server_dir()
            for output in output_files:
                if output.file:
                    self.upload_file(session, output, server_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        try:
            build_info = session.CGImport(koji_metadata, server_dir)
        except Exception:
            self.log.debug("metadata: %r", koji_metadata)
            raise

        # Older versions of CGImport do not return a value.
        build_id = build_info.get("id") if build_info else None

        self.log.debug("Build information: %s",
                       json.dumps(build_info, sort_keys=True, indent=4))

        # If configured, koji_tag_build plugin will perform build tagging
        tag_later = are_plugins_in_order(self.workflow.exit_plugins_conf,
                                         PLUGIN_KOJI_PROMOTE_PLUGIN_KEY,
                                         PLUGIN_KOJI_TAG_BUILD_KEY)
        if not tag_later and build_id is not None and self.target is not None:
            tag_koji_build(session, build_id, self.target,
                           poll_interval=self.poll_interval)

        return build_id
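
# A minimal, hypothetical wiring sketch (not part of the plugin source):
# how this exit plugin might be listed in a build workflow's plugin
# configuration. All parameter values below are placeholders.
#
#   exit_plugins = [{
#       'name': PLUGIN_KOJI_PROMOTE_PLUGIN_KEY,  # the plugin's key
#       'args': {
#           'kojihub': 'https://koji.example.com/kojihub',
#           'url': 'https://openshift.example.com/',
#           'koji_ssl_certs': '/etc/koji/certs',
#           'target': 'example-target',
#           'blocksize': 10485760,
#       },
#   }]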
Example #7
class KojiPromotePlugin(ExitPlugin):
    """
    Promote this build to Koji

    Submits a successful build to Koji using the Content Generator API,
    https://fedoraproject.org/wiki/Koji/ContentGenerators

    Authentication is with Kerberos unless the koji_ssl_certs
    configuration parameter is given, in which case it should be a
    path at which 'cert', 'ca', and 'serverca' are the certificates
    for SSL authentication.

    If Kerberos is used for authentication, the default principal will
    be used (from the kernel keyring) unless both koji_keytab and
    koji_principal are specified. The koji_keytab parameter is a
    keytab name like 'type:name', and so can be used to specify a key
    in a Kubernetes secret by specifying 'FILE:/path/to/key'.

    If metadata_only is set, the 'docker save' image will not be
    uploaded, only the logs. The import will be marked as
    metadata-only.

    Runs as an exit plugin in order to capture logs from all other
    plugins.
    """

    key = "koji_promote"
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow, kojihub, url,
                 verify_ssl=True, use_auth=True,
                 koji_ssl_certs=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 metadata_only=False, blocksize=None,
                 target=None, poll_interval=5):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the 'docker save' image
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user
        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab
        self.metadata_only = metadata_only
        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.namespace = get_build_json().get('metadata', {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                                  use_auth=use_auth, verify_ssl=verify_ssl,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.nvr_image = None

    @staticmethod
    def parse_rpm_output(output, tags, separator=';'):
        """
        Parse output of the rpm query.

        :param output: list, decoded output (str) from the rpm subprocess
        :param tags: list, str fields used for query output
        :return: list, dicts describing each rpm package
        """

        def field(tag):
            """
            Get a field value by name
            """
            try:
                value = fields[tags.index(tag)]
            except ValueError:
                return None

            if value == '(none)':
                return None

            return value

        components = []
        sigmarker = 'Key ID '
        for rpm in output:
            fields = rpm.rstrip('\n').split(separator)
            if len(fields) < len(tags):
                continue

            signature = field('SIGPGP:pgpsig') or field('SIGGPG:pgpsig')
            if signature:
                parts = signature.split(sigmarker, 1)
                if len(parts) > 1:
                    signature = parts[1]

            component_rpm = {
                'type': 'rpm',
                'name': field('NAME'),
                'version': field('VERSION'),
                'release': field('RELEASE'),
                'arch': field('ARCH'),
                'sigmd5': field('SIGMD5'),
                'signature': signature,
            }

            # Special handling for epoch as it must be an integer or None
            epoch = field('EPOCH')
            if epoch is not None:
                epoch = int(epoch)

            component_rpm['epoch'] = epoch

            if component_rpm['name'] != 'gpg-pubkey':
                components.append(component_rpm)

        return components
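
    # A worked sample (hypothetical input; separator=';', tags as passed by
    # get_rpms below):
    #
    #   line = ("bash;4.2.46;34.el7;x86_64;(none);d0044abc;"
    #           "RSA/SHA256, Thu 01 Jan 1970, Key ID 199e2f91fd431d51;(none)")
    #   KojiPromotePlugin.parse_rpm_output([line], tags, separator=';')
    #   # -> [{'type': 'rpm', 'name': 'bash', 'version': '4.2.46',
    #   #      'release': '34.el7', 'arch': 'x86_64', 'sigmd5': 'd0044abc',
    #   #      'signature': '199e2f91fd431d51', 'epoch': None}]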

    def get_rpms(self):
        """
        Build a list of installed RPMs in the format required for the
        metadata.
        """

        tags = [
            'NAME',
            'VERSION',
            'RELEASE',
            'ARCH',
            'EPOCH',
            'SIGMD5',
            'SIGPGP:pgpsig',
            'SIGGPG:pgpsig',
        ]

        sep = ';'
        fmt = sep.join(["%%{%s}" % tag for tag in tags])
        cmd = "/bin/rpm -qa --qf '{0}\n'".format(fmt)
        try:
            # py3
            (status, output) = subprocess.getstatusoutput(cmd)
        except AttributeError:
            # py2
            with open('/dev/null', 'r+') as devnull:
                p = subprocess.Popen(cmd,
                                     shell=True,
                                     stdin=devnull,
                                     stdout=subprocess.PIPE,
                                     stderr=devnull)

                (stdout, _) = p.communicate()
                status = p.wait()
                output = stdout.decode()

        if status != 0:
            self.log.debug("%s: stderr output: %s", cmd, stderr)
            raise RuntimeError("%s: exit code %s" % (cmd, status))

        return self.parse_rpm_output(output.splitlines(), tags, separator=sep)
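
    # With the tag list above, the generated query is effectively:
    #   /bin/rpm -qa --qf '%{NAME};%{VERSION};%{RELEASE};%{ARCH};%{EPOCH};%{SIGMD5};%{SIGPGP:pgpsig};%{SIGGPG:pgpsig}\n'
    # one separator-joined line per installed package, which
    # parse_rpm_output() then splits back into fields.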

    def get_output_metadata(self, path, filename):
        """
        Describe a file by its metadata.

        :return: dict
        """

        checksums = get_checksums(path, ['md5'])
        metadata = {'filename': filename,
                    'filesize': os.path.getsize(path),
                    'checksum': checksums['md5sum'],
                    'checksum_type': 'md5'}

        if self.metadata_only:
            metadata['metadata_only'] = True

        return metadata

    def get_builder_image_id(self):
        """
        Find out the docker ID of the buildroot image we are in.
        """

        try:
            buildroot_tag = os.environ["OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE"]
        except KeyError:
            return ''

        try:
            pod = self.osbs.get_pod_for_build(self.build_id)
            all_images = pod.get_container_image_ids()
        except OsbsException as ex:
            self.log.error("unable to find image id: %r", ex)
            return buildroot_tag

        try:
            return all_images[buildroot_tag]
        except KeyError:
            self.log.error("Unable to determine buildroot image ID for %s",
                           buildroot_tag)
            return buildroot_tag

    def get_buildroot(self, build_id):
        """
        Build the buildroot entry of the metadata.

        :return: dict, partial metadata
        """

        docker_version = self.tasker.get_version()
        docker_info = self.tasker.get_info()
        host_arch = docker_version['Arch']
        if host_arch == 'amd64':
            host_arch = 'x86_64'

        buildroot = {
            'id': 1,
            'host': {
                'os': docker_info['OperatingSystem'],
                'arch': host_arch,
            },
            'content_generator': {
                'name': PROG,
                'version': atomic_reactor_version,
            },
            'container': {
                'type': 'docker',
                'arch': os.uname()[4],
            },
            'tools': [
                {
                    'name': tool['name'],
                    'version': tool['version'],
                }
                for tool in get_version_of_tools()] + [
                {
                    'name': 'docker',
                    'version': docker_version['Version'],
                },
            ],
            'components': self.get_rpms(),
            'extra': {
                'osbs': {
                    'build_id': build_id,
                    'builder_image_id': self.get_builder_image_id(),
                }
            },
        }

        return buildroot

    def get_logs(self):
        """
        Build the logs entry for the metadata 'output' section

        :return: list, Output instances
        """

        output = []

        # Collect logs from server
        try:
            logs = self.osbs.get_build_logs(self.build_id)
        except OsbsException as ex:
            self.log.error("unable to get build logs: %r", ex)
        else:
            # Deleted once closed
            logfile = NamedTemporaryFile(prefix=self.build_id,
                                         suffix=".log",
                                         mode='w')
            logfile.write(logs)
            logfile.flush()
            metadata = self.get_output_metadata(logfile.name,
                                                "openshift-final.log")
            output.append(Output(file=logfile, metadata=metadata))

        docker_logs = NamedTemporaryFile(prefix="docker-%s" % self.build_id,
                                         suffix=".log",
                                         mode='w')
        docker_logs.write("\n".join(self.workflow.build_logs))
        docker_logs.flush()
        output.append(Output(file=docker_logs,
                             metadata=self.get_output_metadata(docker_logs.name,
                                                               "build.log")))
        return output

    def get_image_components(self):
        """
        Re-package the output of the rpmqa plugin into the format required
        for the metadata.
        """

        try:
            output = self.workflow.postbuild_results[PostBuildRPMqaPlugin.key]
        except KeyError:
            self.log.error("%s plugin did not run!",
                           PostBuildRPMqaPlugin.key)
            return []

        return self.parse_rpm_output(output, PostBuildRPMqaPlugin.rpm_tags,
                                     separator=',')

    def get_image_output(self, arch):
        """
        Create the output for the image

        This is the Koji Content Generator metadata, along with the
        'docker save' output to upload.

        For metadata-only builds, an empty file is used instead of the
        output of 'docker save'.

        :param arch: str, architecture for this output
        :return: tuple, (metadata dict, Output instance)

        """

        image_id = self.workflow.builder.image_id
        saved_image = self.workflow.exported_image_sequence[-1].get('path')
        ext = saved_image.split('.', 1)[1]
        name_fmt = 'docker-image-{id}.{arch}.{ext}'
        image_name = name_fmt.format(id=image_id, arch=arch, ext=ext)
        if self.metadata_only:
            metadata = self.get_output_metadata(os.path.devnull, image_name)
            output = Output(file=None, metadata=metadata)
        else:
            metadata = self.get_output_metadata(saved_image, image_name)
            output = Output(file=open(saved_image), metadata=metadata)

        return metadata, output
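
    # E.g. with image_id='abc123', arch='x86_64' and a saved image at
    # '/tmp/image.tar.gz' (hypothetical values), the upload name becomes
    # 'docker-image-abc123.x86_64.tar.gz'.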

    def get_digests(self):
        """
        Returns a map of repositories to digests
        """

        digests = {}  # repository -> digest
        for registry in self.workflow.push_conf.docker_registries:
            for image in self.workflow.tag_conf.images:
                image_str = image.to_str()
                if image_str in registry.digests:
                    digest = registry.digests[image_str]
                    digests[image.to_str(registry=False)] = digest

        return digests
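
    # Shape of the returned map (hypothetical values): keys are pullspecs
    # without the registry part, values are the manifest digests, e.g.
    #   {'namespace/repo:1.0-2': 'sha256:0a1b2c...'}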

    def get_repositories(self, digests):
        """
        Build the repositories metadata

        :param digests: dict, repository -> digest
        """
        if self.workflow.push_conf.pulp_registries:
            # If pulp was used, only report pulp images
            registries = self.workflow.push_conf.pulp_registries
        else:
            # Otherwise report all the images we pushed
            registries = self.workflow.push_conf.all_registries

        output_images = []
        for registry in registries:
            image = self.nvr_image.copy()
            image.registry = registry.uri
            pullspec = image.to_str()

            output_images.append(pullspec)

            digest = digests.get(image.to_str(registry=False))
            if digest:
                digest_pullspec = image.to_str(tag=False) + "@" + digest
                output_images.append(digest_pullspec)

        return output_images
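
    # For registry 'registry.example.com' and an nvr_image of
    # 'namespace/repo:1.0-2' (hypothetical), this yields the tag pullspec
    # and, when a digest is known, the digest pullspec as well:
    #   registry.example.com/namespace/repo:1.0-2
    #   registry.example.com/namespace/repo@sha256:0a1b2c...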

    def get_output(self, buildroot_id):
        """
        Build the 'output' section of the metadata.

        :return: list, Output instances
        """

        def add_buildroot_id(output):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        output_files = [add_log_type(add_buildroot_id(metadata))
                        for metadata in self.get_logs()]

        # Parent of squashed built image is base image
        image_id = self.workflow.builder.image_id
        parent_id = self.workflow.base_image_inspect['Id']
        digests = self.get_digests()
        repositories = self.get_repositories(digests)
        arch = os.uname()[4]
        metadata, output = self.get_image_output(arch)
        metadata.update({
            'arch': arch,
            'type': 'docker-image',
            'components': self.get_image_components(),
            'extra': {
                'image': {
                    'arch': arch,
                },
                'docker': {
                    'id': image_id,
                    'parent_id': parent_id,
                    'repositories': repositories,
                },
            },
        })

        # Add the 'docker save' image to the output
        image = add_buildroot_id(output)
        output_files.append(image)

        return output_files

    def get_build(self, metadata):
        start_time = int(atomic_reactor_start_time)
        labels = DockerfileParser(self.workflow.builder.df_path).labels
        component = get_preferred_label(labels, 'com.redhat.component')
        version = get_preferred_label(labels, 'version')
        release = get_preferred_label(labels, 'release')

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {'image': {}}
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            extra['container_koji_task_id'] = koji_task_id

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                extra['filesystem_koji_task_id'] = str(task_id)

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build

    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        for image in self.workflow.tag_conf.primary_images:
            # dash at first/last position does not count
            if '-' in image.tag[1:-1]:
                self.nvr_image = image
                break
        else:
            raise RuntimeError('Unable to determine name:version-release')

        metadata_version = 0

        build = self.get_build(metadata)
        buildroot = self.get_buildroot(build_id=self.build_id)
        output_files = self.get_output(buildroot['id'])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': [buildroot],
            'output': [output.metadata for output in output_files],
        }

        return koji_metadata, output_files

    def upload_file(self, session, output, serverdir):
        """
        Upload a file to koji

        :return: str, pathname on server
        """
        name = output.metadata['filename']
        self.log.debug("uploading %r to %r as %r",
                       output.file.name, serverdir, name)

        kwargs = {}
        if self.blocksize is not None:
            kwargs['blocksize'] = self.blocksize
            self.log.debug("using blocksize %d", self.blocksize)

        upload_logger = KojiUploadLogger(self.log)
        session.uploadWrapper(output.file.name, serverdir, name=name,
                              callback=upload_logger.callback, **kwargs)
        path = os.path.join(serverdir, name)
        self.log.debug("uploaded %r", path)
        return path

    @staticmethod
    def get_upload_server_dir():
        """
        Create a path name for uploading files to

        :return: str, path name expected to be unique
        """
        dir_prefix = 'koji-promote'
        random_chars = ''.join([random.choice(ascii_letters)
                                for _ in range(8)])
        unique_fragment = '%r.%s' % (time.time(), random_chars)
        return os.path.join(dir_prefix, unique_fragment)

    def login(self):
        """
        Log in to koji

        :return: koji.ClientSession instance, logged in
        """
        auth_info = {
            "proxyuser": self.koji_proxy_user,
            "ssl_certs_dir": self.koji_ssl_certs,
            "krb_principal": self.koji_principal,
            "krb_keytab": self.koji_keytab
        }
        return create_koji_session(self.kojihub, auth_info)

    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab) or
                (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        koji_metadata, output_files = self.get_metadata()

        try:
            session = self.login()
            server_dir = self.get_upload_server_dir()
            for output in output_files:
                if output.file:
                    self.upload_file(session, output, server_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        try:
            build_info = session.CGImport(koji_metadata, server_dir)
        except Exception:
            self.log.debug("metadata: %r", koji_metadata)
            raise

        # Older versions of CGImport do not return a value.
        build_id = build_info.get("id") if build_info else None

        self.log.debug("Build information: %s",
                       json.dumps(build_info, sort_keys=True, indent=4))

        # Tag the build
        if build_id is not None and self.target is not None:
            self.log.debug("Finding build tag for target %s", self.target)
            target_info = session.getBuildTarget(self.target)
            build_tag = target_info['dest_tag_name']
            self.log.info("Tagging build with %s", build_tag)
            task_id = session.tagBuild(build_tag, build_id)
            task = TaskWatcher(session, task_id,
                               poll_interval=self.poll_interval)
            task.wait()
            if task.failed():
                raise RuntimeError("Task %s failed to tag koji build" % task_id)

        return build_id
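
# A reduced sketch of the import-and-tag flow run() performs above, stripped
# to the bare koji client calls (hypothetical standalone usage; 'session' is
# an authenticated koji.ClientSession, 'metadata' and 'server_dir' come from
# get_metadata() and get_upload_server_dir()):
#
#   build_info = session.CGImport(metadata, server_dir)
#   target_info = session.getBuildTarget('example-target')
#   task_id = session.tagBuild(target_info['dest_tag_name'], build_info['id'])
#   TaskWatcher(session, task_id, poll_interval=5).wait()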