Example #1
    def run(self):
        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. "
                           "Probably not running in build container.")
            raise

        osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl)
        osbs = OSBS(osbs_conf, osbs_conf)

        metadata = build_json.get("metadata", {})
        kwargs = {}
        if 'namespace' in metadata:
            kwargs['namespace'] = metadata['namespace']

        labels = metadata.get("labels", {})
        try:
            imagestream = labels["imagestream"]
        except KeyError:
            self.log.error("No imagestream label set for this Build")
            raise

        self.log.info("Importing tags for %s", imagestream)
        osbs.import_image(imagestream, **kwargs)
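The plugin above expects its Build object in the BUILD environment variable. A minimal sketch of a payload that satisfies its lookups (metadata.namespace and metadata.labels.imagestream); the values are placeholders, not a complete OpenShift Build object:

import json
import os

# Only the keys the plugin actually reads are included here.
os.environ["BUILD"] = json.dumps({
    "metadata": {
        "namespace": "osbs-test",
        "labels": {"imagestream": "example-imagestream"},
    },
})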
Example #2
    def run(self):
        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. "
                           "Probably not running in build container.")
            raise

        osbs_conf = Configuration(conf_file=None,
                                  openshift_uri=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl)
        osbs = OSBS(osbs_conf, osbs_conf)

        metadata = build_json.get("metadata", {})
        kwargs = {}
        if 'namespace' in metadata:
            kwargs['namespace'] = metadata['namespace']

        labels = metadata.get("labels", {})
        try:
            imagestream = labels["imagestream"]
        except KeyError:
            self.log.error("No imagestream label set for this Build")
            raise

        self.log.info("Importing tags for %s", imagestream)
        osbs.import_image(imagestream, **kwargs)
Example #3
def osbs106(openshift):
    with NamedTemporaryFile(mode="wt") as fp:
        fp.write("""
[general]
build_json_dir = {build_json_dir}
openshift_required_version = 1.0.6
[default]
openshift_url = /
registry_uri = registry.example.com
sources_command = fedpkg sources
vendor = Example, Inc.
build_host = localhost
authoritative_registry = registry.example.com
distribution_scope = authoritative-source-only
koji_root = http://koji.example.com/kojiroot
koji_hub = http://koji.example.com/kojihub
build_type = simple
use_auth = false
""".format (build_json_dir="inputs"))
        fp.flush()
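        # Configuration parses the file eagerly, so it must be constructed
        # while the temporary file still exists; the resulting OSBS object
        # stays valid after the with-block deletes the file.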
        dummy_config = Configuration(fp.name)
        osbs = OSBS(dummy_config, dummy_config)

    osbs.os = openshift
    return osbs
Example #4
def osbs(request, openshift):
    kwargs = request.param['kwargs'] or {}

    kwargs.setdefault('build_json_dir', 'inputs')
    kwargs.setdefault('additional_general', '')
    with NamedTemporaryFile(mode="wt") as fp:
        config = dedent("""\
            [general]
            build_json_dir = {build_json_dir}
            {additional_general}

            [default]
            openshift_url = /
            flatpak_base_image = registry.fedoraproject.org/fedora:latest
            can_orchestrate = true
            use_auth = false
            build_from = image:buildroot:latest
            """)

        if request.param['additional_config'] is not None:
            config += request.param['additional_config']
            config += '\n'

        fp.write(config.format(**kwargs))
        fp.flush()
        dummy_config = Configuration(fp.name)
        osbs = OSBS(dummy_config, dummy_config)

    osbs.os = openshift
    return osbs
Example #5
def osbs106(openshift):
    with NamedTemporaryFile(mode="wt") as fp:
        fp.write("""
[general]
build_json_dir = {build_json_dir}
openshift_required_version = 1.0.6
[default]
openshift_url = /
registry_uri = registry.example.com
sources_command = fedpkg sources
vendor = Example, Inc.
build_host = localhost
authoritative_registry = registry.example.com
distribution_scope = authoritative-source-only
koji_root = http://koji.example.com/kojiroot
koji_hub = http://koji.example.com/kojihub
use_auth = false
build_from = image:buildroot:latest
""".format(build_json_dir="inputs"))
        fp.flush()
        dummy_config = Configuration(fp.name)
        osbs = OSBS(dummy_config, dummy_config)

    osbs.os = openshift
    return osbs
Example #6
def test_no_inputs():
    with NamedTemporaryFile(mode='w+') as f:
        f.write("""
[general]
build_json_dir=/nonexistent/path/

[default]
build_type=simple
openshift_uri=https://172.0.0.1:8443/
registry_uri=127.0.0.1:5000
""")
        f.flush()
        f.seek(0)
        with pytest.raises(OsbsException):
            os_conf = Configuration(conf_file=f.name,
                                    conf_section="default")
            build_conf = Configuration(conf_file=f.name,
                                       conf_section="default")
            osbs = OSBS(os_conf, build_conf)
            osbs.create_build(git_uri="https://example.com/example.git",
                              git_ref="master",
                              user="******",
                              component="component",
                              target="target",
                              architecture="arch")
Example #7
def cmd_build_source_container(args):
    if args.instance is None:
        conf_section = DEFAULT_CONF_SOURCE_SECTION
    else:
        conf_section = args.instance
    os_conf = Configuration(conf_file=args.config,
                            conf_section=conf_section,
                            cli_args=args)
    osbs = OSBS(os_conf)

    build_kwargs = {
        'user': osbs.os_conf.get_user(),
        'target': osbs.os_conf.get_koji_target(),
        'scratch': args.scratch,
        'signing_intent': args.signing_intent,
        'sources_for_koji_build_nvr': args.sources_for_koji_build_nvr,
        'sources_for_koji_build_id': args.sources_for_koji_build_id,
        'component': args.component,
    }
    if args.userdata:
        build_kwargs['userdata'] = json.loads(args.userdata)

    pipeline_run = osbs.create_source_container_pipeline_run(**build_kwargs)

    print_output(pipeline_run, export_metadata_file=args.export_metadata_file)

    return_val = -1

    if pipeline_run.has_succeeded():
        return_val = 0
    return return_val
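cmd_build_source_container above touches only a handful of attributes on args. A hedged sketch of a compatible argument object for driving it outside argparse; the attribute names come from the lookups above and every value is a placeholder:

from argparse import Namespace

args = Namespace(
    instance=None,                # None falls back to DEFAULT_CONF_SOURCE_SECTION
    config='/etc/osbs/osbs.conf',
    scratch=False,
    signing_intent=None,
    sources_for_koji_build_nvr='example-1.0-1',
    sources_for_koji_build_id=None,
    component='example-component',
    userdata=None,
    export_metadata_file=None,
)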
Example #8
    def run(self):
        """
        run the plugin
        """

        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. Probably not running in build container")
            raise

        metadata = build_json.get("metadata", {})
        labels = metadata.get("labels", {})
        buildconfig = labels["buildconfig"]
        is_rebuild = labels.get(self.label_key) == self.label_value
        self.log.info("This is a rebuild? %s", is_rebuild)

        if not is_rebuild:
            # Update the BuildConfig metadata so the next Build
            # instantiated from it is detected as being an automated
            # rebuild
            kwargs = {}
            if 'namespace' in metadata:
                kwargs['namespace'] = metadata['namespace']

            osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                                      use_auth=self.use_auth,
                                      verify_ssl=self.verify_ssl)
            osbs = OSBS(osbs_conf, osbs_conf)
            labels = {self.label_key: self.label_value}
            osbs.set_labels_on_build_config(buildconfig, labels, **kwargs)

        return is_rebuild
Example #9
    def run(self):
        """
        run the plugin
        """

        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. Probably not running in build container")
            raise

        metadata = build_json.get("metadata", {})
        labels = metadata.get("labels", {})
        buildconfig = labels["buildconfig"]
        is_rebuild = labels.get(self.label_key) == self.label_value
        self.log.info("This is a rebuild? %s", is_rebuild)

        if not is_rebuild:
            # Update the BuildConfig metadata so the next Build
            # instantiated from it is detected as being an automated
            # rebuild
            kwargs = {}
            if 'namespace' in metadata:
                kwargs['namespace'] = metadata['namespace']

            # FIXME: remove `openshift_uri` once osbs-client is released
            osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                                      openshift_url=self.url,
                                      use_auth=self.use_auth,
                                      verify_ssl=self.verify_ssl)
            osbs = OSBS(osbs_conf, osbs_conf)
            labels = {self.label_key: self.label_value}
            osbs.set_labels_on_build_config(buildconfig, labels, **kwargs)

        return is_rebuild
Example #10
    def __init__(self,
                 tasker,
                 workflow,
                 kojihub,
                 url,
                 verify_ssl=True,
                 use_auth=True,
                 koji_ssl_certs=None,
                 koji_proxy_user=None,
                 koji_principal=None,
                 koji_keytab=None,
                 metadata_only=False,
                 blocksize=None,
                 target=None,
                 poll_interval=5):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the 'docker save' image
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user

        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab

        self.metadata_only = metadata_only
        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.namespace = get_build_json().get('metadata',
                                              {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None,
                                  openshift_uri=url,
                                  use_auth=use_auth,
                                  verify_ssl=verify_ssl,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.pullspec_image = None
Example #11
    def setup_osbs_api(self):
        metadata = get_build_json().get("metadata", {})
        osbs_conf = Configuration(conf_file=None,
                                  openshift_url=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=metadata.get('namespace', None))
        self.osbs = OSBS(osbs_conf, osbs_conf)
Example #12
    def __init__(self,
                 tasker,
                 workflow,
                 kojihub,
                 url,
                 build_json_dir,
                 koji_upload_dir,
                 verify_ssl=True,
                 use_auth=True,
                 koji_ssl_certs_dir=None,
                 koji_proxy_user=None,
                 koji_principal=None,
                 koji_keytab=None,
                 blocksize=None):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param build_json_dir: str, path to directory with input json
        :param koji_upload_dir: str, path to use when uploading to hub
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs_dir: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param blocksize: int, blocksize to use for uploading files
        """
        super(KojiUploadPlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs_dir = koji_ssl_certs_dir
        self.koji_proxy_user = koji_proxy_user

        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab

        self.blocksize = blocksize
        self.build_json_dir = build_json_dir
        self.koji_upload_dir = koji_upload_dir

        self.namespace = get_build_json().get('metadata',
                                              {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None,
                                  openshift_uri=url,
                                  use_auth=use_auth,
                                  verify_ssl=verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.pullspec_image = None
Example #13
def osbs(openshift,
         kwargs=None,
         additional_config=None,
         platform_descriptors=None):
    kwargs = kwargs or {}
    platform_descriptors = platform_descriptors or {}

    kwargs.setdefault('build_json_dir', 'inputs')
    kwargs.setdefault('registry_uri', 'registry.example.com')
    kwargs.setdefault('additional_general', '')
    with NamedTemporaryFile(mode="wt") as fp:
        config = dedent("""\
            [general]
            build_json_dir = {build_json_dir}
            {additional_general}

            [default]
            openshift_url = /
            registry_uri = {registry_uri}
            sources_command = fedpkg sources
            vendor = Example, Inc.
            build_host = localhost
            authoritative_registry = registry.example.com
            distribution_scope = authoritative-source-only
            koji_root = http://koji.example.com/kojiroot
            koji_hub = http://koji.example.com/kojihub
            flatpak_base_image = registry.fedoraproject.org/fedora:latest
            odcs_url = https://odcs.example.com/odcs/1
            pdc_url = https://pdc.example.com/rest_api/v1
            use_auth = false
            can_orchestrate = true
            build_from = image:buildroot:latest
            """)

        if additional_config is not None:
            config += additional_config
            config += '\n'

        for platform, platform_info in platform_descriptors.items():
            if not platform_info:
                continue

            config += '[platform:{0}]\n'.format(platform)
            for item, value in platform_info.items():
                config += '{0} = {1}\n'.format(item, value)

        fp.write(config.format(**kwargs))
        fp.flush()
        dummy_config = Configuration(fp.name)
        osbs = OSBS(dummy_config, dummy_config)

    osbs.os = openshift
    return osbs
Example #14
    def get_plugins_with_user_data(self, user_params, user_data):
        #  get the reactor config map and derive an osbs instance from it

        from osbs.api import OSBS
        from osbs.conf import Configuration

        reactor_config_override = user_data.get('reactor_config_override')
        if reactor_config_override:
            read_yaml(json.dumps(reactor_config_override), 'schemas/config.json')

        osbs_conf = Configuration(build_json_dir=user_data.get('build_json_dir'))
        osbs = OSBS(osbs_conf, osbs_conf)
        return osbs.render_plugins_configuration(user_params)
Example #15
def get_plugins_with_user_data(user_params, user_data):
    """Get the reactor config map and derive an osbs instance from it"""

    from osbs.api import OSBS
    from osbs.conf import Configuration

    reactor_config_override = user_data.get('reactor_config_override')
    if reactor_config_override:
        read_yaml(json.dumps(reactor_config_override), 'schemas/config.json')

    osbs_conf = Configuration(build_json_dir=user_data.get('build_json_dir'))
    osbs = OSBS(osbs_conf, osbs_conf)
    return osbs.render_plugins_configuration(user_params)
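The function reads just two keys from user_data. A sketch of the expected shape; reactor_config_override, when present, must be a dict that validates against schemas/config.json:

user_data = {
    'build_json_dir': 'inputs',
    'reactor_config_override': None,  # or a dict matching schemas/config.json
}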
Example #16
def cmd_build(args):
    if args.instance is None:
        conf_section = DEFAULT_CONF_BINARY_SECTION
    else:
        conf_section = args.instance
    os_conf = Configuration(conf_file=args.config,
                            conf_section=conf_section,
                            cli_args=args)
    osbs = OSBS(os_conf)

    build_kwargs = {
        'git_uri': osbs.os_conf.get_git_uri(),
        'git_ref': osbs.os_conf.get_git_ref(),
        'git_branch': osbs.os_conf.get_git_branch(),
        'user': osbs.os_conf.get_user(),
        'tag': osbs.os_conf.get_tag(),
        'target': osbs.os_conf.get_koji_target(),
        'yum_repourls': osbs.os_conf.get_yum_repourls(),
        'dependency_replacements': osbs.os_conf.get_dependency_replacements(),
        'scratch': args.scratch,
        'platforms': args.platforms,
        'release': args.release,
        'koji_parent_build': args.koji_parent_build,
        'isolated': args.isolated,
        'signing_intent': args.signing_intent,
        'compose_ids': args.compose_ids,
        'operator_csv_modifications_url': args.operator_csv_modifications_url,
    }
    if args.userdata:
        build_kwargs['userdata'] = json.loads(args.userdata)
    if osbs.os_conf.get_flatpak():
        build_kwargs['flatpak'] = True

    pipeline_run = osbs.create_binary_container_pipeline_run(**build_kwargs)

    print_output(pipeline_run, export_metadata_file=args.export_metadata_file)

    return_val = -1

    if pipeline_run.has_succeeded():
        return_val = 0
    cleanup_used_resources = osbs.os_conf.get_cleanup_used_resources()
    if cleanup_used_resources:
        try:
            logger.info("pipeline run removed: %s",
                        pipeline_run.remove_pipeline_run())
        except OsbsResponseException:
            logger.error("failed to remove pipeline run %s",
                         pipeline_run.pipeline_run_name)
            raise
    return return_val
Example #17
def osbs_cant_orchestrate(openshift):
    with NamedTemporaryFile(mode="wt") as fp:
        fp.write("""
[general]
build_json_dir = {build_json_dir}
[default]
openshift_url = /
use_auth = false
""".format(build_json_dir="inputs"))
        fp.flush()
        dummy_config = Configuration(fp.name)
        osbs = OSBS(dummy_config, dummy_config)

    osbs.os = openshift
    return osbs
Example #18
def test_no_branch():
    with NamedTemporaryFile(mode='w+') as f:
        f.write("""
[default]
openshift_url=https://172.0.0.1:8443/
registry_uri=127.0.0.1:5000
""")
        f.flush()
        f.seek(0)
        with pytest.raises(OsbsException):
            os_conf = Configuration(conf_file=f.name,
                                    conf_section="default")
            osbs = OSBS(os_conf)
            osbs.create_binary_container_pipeline_run(git_uri="https://example.com/example.git",
                                                      git_ref="master")
Example #19
    def get_cluster_info(self, cluster, platform):
        kwargs = deepcopy(self.config_kwargs)
        kwargs['conf_section'] = cluster.name
        if self.osbs_client_config:
            kwargs['conf_file'] = os.path.join(self.osbs_client_config,
                                               'osbs.conf')

        conf = Configuration(**kwargs)
        osbs = OSBS(conf, conf)
        try:
            current_builds = self.get_current_builds(osbs)
        except OsbsException as e:
            # If the build is canceled reraise the error
            if isinstance(e.cause, BuildCanceledException):
                raise e

            self.log.exception("Error occurred while listing builds on %s",
                               cluster.name)
            return ClusterInfo(cluster, platform, osbs,
                               self.UNREACHABLE_CLUSTER_LOAD)

        load = current_builds / cluster.max_concurrent_builds
        self.log.debug(
            'enabled cluster %s for platform %s has load %s and active builds %s/%s',
            cluster.name, platform, load, current_builds,
            cluster.max_concurrent_builds)
        return ClusterInfo(cluster, platform, osbs, load)
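ClusterInfo is consumed here as a four-field record. A compatible definition inferred purely from the call sites above (the real class may carry more):

from collections import namedtuple

# Fields in the order they are passed to ClusterInfo(...) above.
ClusterInfo = namedtuple('ClusterInfo', ['cluster', 'platform', 'osbs', 'load'])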
Example #20
def osbs_for_capture(tmpdir):
    kwargs = {
        'build_json_dir': 'inputs',
        'openshift_url': OPENSHIFT_URL,
        'namespace': TEST_OCP_NAMESPACE
    }

    with NamedTemporaryFile(mode="wt") as fp:
        config = dedent("""\
            [general]
            build_json_dir = {build_json_dir}

            [default]
            openshift_url = {openshift_url}
            use_auth = false
            namespace = {namespace}
            """)

        fp.write(config.format(**kwargs))
        fp.flush()
        dummy_config = Configuration(fp.name, conf_section='default')
        osbs = OSBS(dummy_config)

    setup_json_capture(osbs, osbs.os_conf, str(tmpdir))
    return osbs
Example #21
    def get_plugins_with_user_params(self, build_json, user_params):
        #  get the reactor config map and derive an osbs instance from it

        from osbs.api import OSBS
        from osbs.conf import Configuration

        # make sure the input json is valid
        read_yaml(user_params, 'schemas/user_params.json')
        user_data = json.loads(user_params)
        reactor_config_override = user_data.get('reactor_config_override')
        if reactor_config_override:
            read_yaml(json.dumps(reactor_config_override), 'schemas/config.json')

        osbs_conf = Configuration(build_json_dir=user_data.get('build_json_dir'))
        osbs = OSBS(osbs_conf, osbs_conf)
        return osbs.render_plugins_configuration(user_params)
Example #22
def get_openshift_session(config, namespace):
    from osbs.api import OSBS
    from osbs.conf import Configuration

    config_kwargs = {
        'verify_ssl': not config.openshift.get('insecure', False),
        'namespace': namespace,
        'use_auth': False,
        'conf_file': None,
        'openshift_url': config.openshift['url'],
    }

    if config.openshift.get('auth'):
        krb_keytab_path = config.openshift['auth'].get('krb_keytab_path')
        if krb_keytab_path:
            config_kwargs['kerberos_keytab'] = krb_keytab_path
        krb_principal = config.openshift['auth'].get('krb_principal')
        if krb_principal:
            config_kwargs['kerberos_principal'] = krb_principal
        krb_cache_path = config.openshift['auth'].get('krb_cache_path')
        if krb_cache_path:
            config_kwargs['kerberos_ccache'] = krb_cache_path
        ssl_certs_dir = config.openshift['auth'].get('ssl_certs_dir')
        if ssl_certs_dir:
            config_kwargs['client_cert'] = os.path.join(ssl_certs_dir, 'cert')
            config_kwargs['client_key'] = os.path.join(ssl_certs_dir, 'key')
        config_kwargs['use_auth'] = config.openshift['auth'].get(
            'enable', False)

    osbs_conf = Configuration(**config_kwargs)
    return OSBS(osbs_conf)
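The config argument above only needs an openshift mapping. A minimal stand-in that exercises the unauthenticated path; all values are placeholders:

class ReactorConfigStub(object):
    # Only the keys get_openshift_session reads; auth stays disabled.
    openshift = {
        'url': 'https://openshift.example.com:8443/',
        'insecure': False,
        'auth': {'enable': False},
    }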
Example #23
    def test_get_existing_build_config_by_name(self):
        build_config = {
            'metadata': {
                'name': 'name',
                'labels': {
                    'git-repo-name': 'reponame',
                    'git-branch': 'branch',
                }
            },
        }

        existing_build_config = copy.deepcopy(build_config)
        existing_build_config['_from'] = 'from-name'

        config = Configuration()
        osbs = OSBS(config, config)

        (flexmock(
            osbs.os).should_receive('get_build_config_by_labels').with_args([
                ('git-repo-name', 'reponame'), ('git-branch', 'branch')
            ]).once().and_raise(OsbsException))
        (flexmock(osbs.os).should_receive('get_build_config').with_args(
            'name').once().and_return(existing_build_config))

        actual_build_config = osbs._get_existing_build_config(build_config)
        assert actual_build_config == existing_build_config
        assert actual_build_config['_from'] == 'from-name'
Example #24
def osbs106(openshift):
    with NamedTemporaryFile(mode="wt") as fp:
        fp.write("""
[general]
build_json_dir = {build_json_dir}
openshift_required_version = 1.0.6
[default]
openshift_url = /
build_from = image:buildroot:latest
""".format(build_json_dir="inputs"))
        fp.flush()
        dummy_config = Configuration(fp.name)
        osbs = OSBS(dummy_config, dummy_config)

    osbs.os = openshift
    return osbs
Example #25
    def test_create_build_config_label_mismatch(self):
        config = Configuration()
        osbs = OSBS(config, config)

        build_json = {
            'apiVersion': osbs.os_conf.get_openshift_api_version(),
            'metadata': {
                'name': 'build',
                'labels': {
                    'git-repo-name': 'reponame',
                    'git-branch': 'branch',
                },
            },
        }

        existing_build_json = copy.deepcopy(build_json)
        existing_build_json['metadata']['name'] = 'build'
        existing_build_json['metadata']['labels'][
            'git-repo-name'] = 'reponame2'
        existing_build_json['metadata']['labels']['git-branch'] = 'branch2'

        build_request = flexmock(render=lambda: build_json,
                                 is_auto_instantiated=lambda: False)

        (flexmock(osbs).should_receive('_get_existing_build_config').once().
         and_return(existing_build_json))

        with pytest.raises(OsbsValidationException) as exc:
            osbs._create_build_config_and_build(build_request)

        assert 'Git labels collide' in str(exc.value)
Example #26
    def test_create_build_config_already_running(self):
        config = Configuration()
        osbs = OSBS(config, config)

        build_json = {
            'apiVersion': osbs.os_conf.get_openshift_api_version(),
            'metadata': {
                'name': 'build',
                'labels': {
                    'git-repo-name': 'reponame',
                    'git-branch': 'branch',
                },
            },
        }

        existing_build_json = copy.deepcopy(build_json)
        existing_build_json['metadata']['name'] = 'existing-build'

        build_request = flexmock(render=lambda: build_json,
                                 is_auto_instantiated=lambda: False)

        (flexmock(osbs).should_receive('_get_existing_build_config').once().
         and_return(existing_build_json))

        (flexmock(osbs).should_receive(
            '_get_running_builds_for_build_config').once().and_return([
                flexmock(status='Running', get_build_name=lambda: 'build-1'),
            ]))

        with pytest.raises(OsbsException):
            osbs._create_build_config_and_build(build_request)
Example #27
    def test_scratch_param_to_create_build(self):
        config = Configuration()
        osbs = OSBS(config, config)

        class MockParser(object):
            labels = {'Name': 'fedora23/something'}
            baseimage = 'fedora23/python'

        kwargs = {
            'git_uri': TEST_GIT_URI,
            'git_ref': TEST_GIT_REF,
            'git_branch': TEST_GIT_BRANCH,
            'user': TEST_USER,
            'component': TEST_COMPONENT,
            'target': TEST_TARGET,
            'architecture': TEST_ARCH,
            'yum_repourls': None,
            'koji_task_id': None,
            'scratch': True,
        }

        (flexmock(utils).should_receive('get_df_parser').with_args(
            TEST_GIT_URI, TEST_GIT_REF,
            git_branch=TEST_GIT_BRANCH).and_return(MockParser()))

        (flexmock(osbs).should_receive(
            '_create_scratch_build').once().and_return(
                flexmock(json=lambda: {'spam': 'maps'})))

        (flexmock(osbs.os).should_receive('create_build_config').never())

        (flexmock(osbs.os).should_receive('update_build_config').never())

        build_response = osbs.create_build(**kwargs)
        assert build_response.json() == {'spam': 'maps'}
Example #28
    def test_create_build_config_create(self):
        config = Configuration()
        osbs = OSBS(config, config)

        build_json = {
            'apiVersion': osbs.os_conf.get_openshift_api_version(),
            'metadata': {
                'name': 'build',
                'labels': {
                    'git-repo-name': 'reponame',
                    'git-branch': 'branch',
                },
            },
        }

        build_request = flexmock(render=lambda: build_json,
                                 is_auto_instantiated=lambda: False)

        (flexmock(osbs).should_receive(
            '_get_existing_build_config').once().and_return(None))

        (flexmock(osbs.os).should_receive('create_build_config').with_args(
            json.dumps(build_json)).once().and_return(
                flexmock(json=lambda: {'spam': 'maps'})))

        (flexmock(osbs.os).should_receive(
            'start_build').with_args('build').once().and_return(
                flexmock(json=lambda: {'spam': 'maps'})))

        build_response = osbs._create_build_config_and_build(build_request)
        assert build_response.json == {'spam': 'maps'}
Example #29
    def __init__(self, tasker, workflow, kojihub, url,
                 verify_ssl=True, use_auth=True,
                 koji_ssl_certs=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 metadata_only=False):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the v1 image
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user
        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab
        self.metadata_only = metadata_only

        osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                                  use_auth=use_auth, verify_ssl=verify_ssl)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.namespace = None
Example #30
def get_openshift_session(workflow, fallback):
    config = get_openshift(workflow, fallback)
    namespace = get_build_json().get('metadata', {}).get('namespace', None)

    from osbs.api import OSBS
    from osbs.conf import Configuration

    config_kwargs = {
        'verify_ssl': not config.get('insecure', False),
        'namespace': namespace,
        'use_auth': False,
        'conf_file': None,
        'openshift_url': config['url'],
        'build_json_dir': config.get('build_json_dir')
    }

    if config.get('auth'):
        krb_keytab_path = config['auth'].get('krb_keytab_path')
        if krb_keytab_path:
            config_kwargs['kerberos_keytab'] = krb_keytab_path
        krb_principal = config['auth'].get('krb_principal')
        if krb_principal:
            config_kwargs['kerberos_principal'] = krb_principal
        krb_cache_path = config['auth'].get('krb_cache_path')
        if krb_cache_path:
            config_kwargs['kerberos_ccache'] = krb_cache_path
        ssl_certs_dir = config['auth'].get('ssl_certs_dir')
        if ssl_certs_dir:
            config_kwargs['client_cert'] = os.path.join(ssl_certs_dir, 'cert')
            config_kwargs['client_key'] = os.path.join(ssl_certs_dir, 'key')
        config_kwargs['use_auth'] = config['auth'].get('enable', False)

    osbs_conf = Configuration(**config_kwargs)
    return OSBS(osbs_conf, osbs_conf)
Example #31
    def test_verify_no_running_builds_zero(self):
        config = Configuration()
        osbs = OSBS(config, config)

        (flexmock(osbs).should_receive('_get_running_builds_for_build_config').
         with_args('build_config_name').once().and_return([]))

        osbs._verify_no_running_builds('build_config_name')
Example #32
def osbs(openshift, kwargs=None, additional_config=None, platform_descriptors=None):
    kwargs = kwargs or {}
    platform_descriptors = platform_descriptors or {}

    kwargs.setdefault('build_json_dir', 'inputs')
    kwargs.setdefault('registry_uri', 'registry.example.com')
    kwargs.setdefault('additional_general', '')
    with NamedTemporaryFile(mode="wt") as fp:
        config = dedent("""\
            [general]
            build_json_dir = {build_json_dir}
            {additional_general}

            [default]
            openshift_url = /
            registry_uri = {registry_uri}
            sources_command = fedpkg sources
            vendor = Example, Inc.
            build_host = localhost
            authoritative_registry = registry.example.com
            distribution_scope = authoritative-source-only
            koji_root = http://koji.example.com/kojiroot
            koji_hub = http://koji.example.com/kojihub
            use_auth = false
            can_orchestrate = true
            """)

        if additional_config is not None:
            config += additional_config
            config += '\n'

        for platform, platform_info in platform_descriptors.items():
            if not platform_info:
                continue

            config += '[platform:{0}]\n'.format(platform)
            for item, value in platform_info.items():
                config += '{0} = {1}\n'.format(item, value)

        fp.write(config.format(**kwargs))
        fp.flush()
        dummy_config = Configuration(fp.name)
        osbs = OSBS(dummy_config, dummy_config)

    osbs.os = openshift
    return osbs
Example #33
    def run(self):
        """
        run the plugin
        """

        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. Probably not running in build container")
            raise

        metadata = build_json.get("metadata", {})
        labels = metadata.get("labels", {})
        buildconfig = labels["buildconfig"]
        is_rebuild = labels.get(self.label_key) == self.label_value
        self.log.info("This is a rebuild? %s", is_rebuild)

        if not is_rebuild:
            # Update the BuildConfig metadata so the next Build
            # instantiated from it is detected as being an automated
            # rebuild
            kwargs = {}
            if 'namespace' in metadata:
                kwargs['namespace'] = metadata['namespace']

            # FIXME: remove `openshift_uri` once osbs-client is released
            osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                                      openshift_url=self.url,
                                      use_auth=self.use_auth,
                                      verify_ssl=self.verify_ssl)
            osbs = OSBS(osbs_conf, osbs_conf)
            labels = {self.label_key: self.label_value}
            try:
                osbs.set_labels_on_build_config(buildconfig, labels, **kwargs)
            except OsbsResponseException as ex:
                if ex.status_code == 409:
                    # Someone else was modifying the build
                    # configuration at the same time. Try again.
                    self.log.debug("got status %d, retrying", ex.status_code)
                    osbs.set_labels_on_build_config(buildconfig, labels,
                                                    **kwargs)
                else:
                    raise

        return is_rebuild
Example #34
    def run(self):
        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. "
                           "Probably not running in build container.")
            raise

        metadata = build_json.get("metadata", {})
        kwargs = {}
        if 'namespace' in metadata:
            kwargs['namespace'] = metadata['namespace']

        osbs_conf = Configuration(openshift_uri=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl)
        osbs = OSBS(osbs_conf, osbs_conf)

        try:
            osbs.get_image_stream(self.imagestream, **kwargs)
        except OsbsResponseException:
            self.log.info("Creating ImageStream %s for %s", self.imagestream,
                          self.docker_image_repo)

            # Tags are imported automatically on creation
            osbs.create_image_stream(self.imagestream, self.docker_image_repo,
                                     **kwargs)
        else:
            self.log.info("Importing tags for %s", self.imagestream)
            osbs.import_image(self.imagestream, **kwargs)
Example #35
    def setup_osbs_api(self):
        metadata = get_build_json().get("metadata", {})
        osbs_conf = Configuration(conf_file=None,
                                  openshift_url=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=metadata.get('namespace', None))
        self.osbs = OSBS(osbs_conf, osbs_conf)
Example #36
    def test_create_build_config_bad_version(self):
        config = Configuration()
        osbs = OSBS(config, config)
        build_json = {'apiVersion': 'spam'}
        build_request = flexmock(render=lambda: build_json,
                                 is_auto_instantiated=lambda: False)

        with pytest.raises(OsbsValidationException):
            osbs._create_build_config_and_build(build_request)
Example #37
    def run(self):
        metadata = get_build_json().get("metadata", {})
        kwargs = {}

        # FIXME: remove `openshift_uri` once osbs-client is released
        osbs_conf = Configuration(conf_file=None,
                                  openshift_uri=self.url,
                                  openshift_url=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=metadata.get('namespace', None))
        osbs = OSBS(osbs_conf, osbs_conf)
        imagestream = None
        try:
            imagestream = osbs.get_image_stream(self.imagestream)
        except OsbsResponseException:
            if self.insecure_registry is not None:
                kwargs['insecure_registry'] = self.insecure_registry

            self.log.info("Creating ImageStream %s for %s", self.imagestream,
                          self.docker_image_repo)

            imagestream = osbs.create_image_stream(self.imagestream,
                                                   self.docker_image_repo,
                                                   **kwargs)
        self.log.info("Importing new tags for %s", self.imagestream)

        primaries = None
        try:
            primaries = self.workflow.build_result.annotations['repositories'][
                'primary']
        except (TypeError, KeyError):
            self.log.exception(
                'Unable to read primary repositories annotations')

        if not primaries:
            raise RuntimeError('Could not find primary images in workflow')

        failures = False
        for s in primaries:
            tag_image_name = ImageName.parse(s)
            tag = tag_image_name.tag
            try:
                osbs.ensure_image_stream_tag(imagestream.json(), tag)
                self.log.info("Imported ImageStreamTag: (%s)", tag)
            except OsbsResponseException:
                failures = True
                self.log.info("Could not import ImageStreamTag: (%s)", tag)
        if failures:
            raise RuntimeError(
                "Failed to import ImageStreamTag(s). Check logs")

        osbs.import_image(self.imagestream)
Example #38
    def __init__(self,
                 tasker,
                 workflow,
                 kojihub,
                 url,
                 verify_ssl=True,
                 use_auth=True,
                 koji_ssl_certs=None,
                 koji_proxy_user=None,
                 koji_principal=None,
                 koji_keytab=None,
                 metadata_only=False):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the v1 image
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user
        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab
        self.metadata_only = metadata_only

        osbs_conf = Configuration(conf_file=None,
                                  openshift_uri=url,
                                  use_auth=use_auth,
                                  verify_ssl=verify_ssl)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.namespace = None
Example #39
    def osbs(self):
        """Handler of OSBS object"""
        if not self._osbs:
            os_conf = Configuration()
            build_conf = Configuration()
            if self.opts.get('scratch'):
                os_conf = Configuration(conf_section='scratch')
                build_conf = Configuration(conf_section='scratch')
            self._osbs = OSBS(os_conf, build_conf)
            assert self._osbs
        return self._osbs
Example #40
    def test_verify_no_running_builds_one(self):
        config = Configuration()
        osbs = OSBS(config, config)

        (flexmock(osbs).should_receive('_get_running_builds_for_build_config').
         with_args('build_config_name').once().and_return([
             flexmock(status='Running', get_build_name=lambda: 'build-1'),
         ]))

        with pytest.raises(OsbsException) as exc:
            osbs._verify_no_running_builds('build_config_name')
        assert str(exc.value).startswith('Build build-1 for build_config_name')
Example #41
    def run(self):
        """
        run the plugin
        """

        metadata = get_build_json().get("metadata", {})
        labels = metadata.get("labels", {})
        buildconfig = labels["buildconfig"]
        is_rebuild = labels.get(self.label_key) == self.label_value
        self.log.info("This is a rebuild? %s", is_rebuild)

        if not is_rebuild:
            # Update the BuildConfig metadata so the next Build
            # instantiated from it is detected as being an automated
            # rebuild

            # FIXME: remove `openshift_uri` once osbs-client is released
            osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                                      openshift_url=self.url,
                                      use_auth=self.use_auth,
                                      verify_ssl=self.verify_ssl,
                                      namespace=metadata.get('namespace', None))
            osbs = OSBS(osbs_conf, osbs_conf)
            labels = {self.label_key: self.label_value}
            try:
                osbs.set_labels_on_build_config(buildconfig, labels)
            except OsbsResponseException as ex:
                if ex.status_code == 409:
                    # Someone else was modifying the build
                    # configuration at the same time. Try again.
                    self.log.debug("got status %d, retrying", ex.status_code)
                    osbs.set_labels_on_build_config(buildconfig, labels)
                else:
                    raise

        return is_rebuild
Example #42
    def __init__(self, tasker, workflow, kojihub, url,
                 verify_ssl=True, use_auth=True,
                 koji_ssl_certs=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 metadata_only=False, blocksize=None,
                 target=None, poll_interval=5):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the 'docker save' image
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user

        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab

        self.metadata_only = metadata_only
        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.namespace = get_build_json().get('metadata', {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                                  use_auth=use_auth, verify_ssl=verify_ssl,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.pullspec_image = None
Example #43
    def run(self):
        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. "
                           "Probably not running in build container.")
            raise

        metadata = build_json.get("metadata", {})
        kwargs = {}
        if 'namespace' in metadata:
            kwargs['namespace'] = metadata['namespace']

        # FIXME: remove `openshift_uri` once osbs-client is released
        osbs_conf = Configuration(openshift_uri=self.url,
                                  openshift_url=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl,
                                  build_json_dir=self.build_json_dir)
        osbs = OSBS(osbs_conf, osbs_conf)

        try:
            osbs.get_image_stream(self.imagestream, **kwargs)
        except OsbsResponseException:
            if self.insecure_registry is not None:
                kwargs['insecure_registry'] = self.insecure_registry

            self.log.info("Creating ImageStream %s for %s", self.imagestream,
                          self.docker_image_repo)

            # Tags are imported automatically on creation
            osbs.create_image_stream(self.imagestream, self.docker_image_repo,
                                     **kwargs)
        else:
            self.log.info("Importing tags for %s", self.imagestream)
            retry_attempts = 3
            while True:
                result = osbs.import_image(self.imagestream, **kwargs)
                if result is not False:
                    break

                if retry_attempts > 0:
                    retry_attempts -= 1
                    self.log.info("no new tags, will retry after %d seconds",
                                  self.retry_delay)
                    sleep(self.retry_delay)
                else:
                    # Retries exhausted: stop rather than loop forever.
                    break
Example #44
    def run(self):
        metadata = get_build_json().get("metadata", {})
        kwargs = {}

        # FIXME: remove `openshift_uri` once osbs-client is released
        osbs_conf = Configuration(openshift_uri=self.url,
                                  openshift_url=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=metadata.get('namespace', None))
        osbs = OSBS(osbs_conf, osbs_conf)

        try:
            osbs.get_image_stream(self.imagestream)
        except OsbsResponseException:
            if self.insecure_registry is not None:
                kwargs['insecure_registry'] = self.insecure_registry

            self.log.info("Creating ImageStream %s for %s", self.imagestream,
                          self.docker_image_repo)

            # Tags are imported automatically on creation
            osbs.create_image_stream(self.imagestream, self.docker_image_repo,
                                     **kwargs)
        else:
            self.log.info("Importing tags for %s", self.imagestream)
            retry_attempts = 3
            while True:
                result = osbs.import_image(self.imagestream, **kwargs)
                if result:
                    break

                if retry_attempts > 0:
                    retry_attempts -= 1
                    self.log.info("no new tags, will retry after %d seconds",
                                  self.retry_delay)
                    sleep(self.retry_delay)
                else:
                    # Retries exhausted: stop rather than loop forever.
                    break
Example #45
class KojiImportPlugin(ExitPlugin):
    """
    Import this build to Koji

    Submits a successful build to Koji using the Content Generator API,
    https://fedoraproject.org/wiki/Koji/ContentGenerators

    Authentication is with Kerberos unless the koji_ssl_certs
    configuration parameter is given, in which case it should be a
    path at which 'cert', 'ca', and 'serverca' are the certificates
    for SSL authentication.

    If Kerberos is used for authentication, the default principal will
    be used (from the kernel keyring) unless both koji_keytab and
    koji_principal are specified. The koji_keytab parameter is a
    keytab name like 'type:name', and so can be used to specify a key
    in a Kubernetes secret by specifying 'FILE:/path/to/key'.

    Runs as an exit plugin in order to capture logs from all other
    plugins.
    """

    key = PLUGIN_KOJI_IMPORT_PLUGIN_KEY
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow, kojihub, url,
                 verify_ssl=True, use_auth=True,
                 koji_ssl_certs=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 blocksize=None,
                 target=None, poll_interval=5):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiImportPlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user

        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab

        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.namespace = get_build_json().get('metadata', {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                                  use_auth=use_auth, verify_ssl=verify_ssl,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None

    def get_output(self, worker_metadatas):
        """
        Build the output entry of the metadata.

        :return: list, containing dicts of partial metadata
        """
        outputs = []
        has_pulp_pull = PLUGIN_PULP_PULL_KEY in self.workflow.exit_results
        try:
            pulp_sync_results = self.workflow.postbuild_results[PLUGIN_PULP_SYNC_KEY]
            crane_registry = pulp_sync_results[0]
        except (KeyError, IndexError):
            crane_registry = None

        for platform in worker_metadatas:
            for instance in worker_metadatas[platform]['output']:
                instance['buildroot_id'] = '{}-{}'.format(platform, instance['buildroot_id'])

                if instance['type'] == 'docker-image':
                    # update image ID with pulp_pull results;
                    # necessary when using Pulp < 2.14. Only do this
                    # when building for a single architecture -- if
                    # building for many, we know Pulp has schema 2
                    # support.
                    if len(worker_metadatas) == 1 and has_pulp_pull:
                        if self.workflow.builder.image_id is not None:
                            instance['extra']['docker']['id'] = self.workflow.builder.image_id

                    # update repositories to point to Crane
                    if crane_registry:
                        pulp_pullspecs = []
                        docker = instance['extra']['docker']
                        for pullspec in docker['repositories']:
                            image = ImageName.parse(pullspec)
                            image.registry = crane_registry.registry
                            pulp_pullspecs.append(image.to_str())

                        docker['repositories'] = pulp_pullspecs

                outputs.append(instance)

        return outputs

    def get_parent_image_koji_build_id(self):
        res = self.workflow.prebuild_results.get(PLUGIN_KOJI_PARENT_KEY) or {}
        build_info = res.get('parent-image-koji-build') or {}
        return build_info.get('id')

    def get_buildroot(self, worker_metadatas):
        """
        Build the buildroot entry of the metadata.

        :return: list, containing dicts of partial metadata
        """
        buildroots = []
        for platform in sorted(worker_metadatas.keys()):
            for instance in worker_metadatas[platform]['buildroots']:
                instance['id'] = '{}-{}'.format(platform, instance['id'])
                buildroots.append(instance)

        return buildroots

    def get_logs(self):
        """
        Build list of log files

        :return: list, of log files
        """

        logs = None
        output = []

        # Collect logs from server
        try:
            logs = self.osbs.get_orchestrator_build_logs(self.build_id)
        except OsbsException as ex:
            self.log.error("unable to get build logs: %r", ex)
            return output
        except TypeError:
            # Older osbs-client has no get_orchestrator_build_logs
            self.log.error("OSBS client does not support get_orchestrator_build_logs")
            return output

        platform_logs = {}
        for entry in logs:
            platform = entry.platform
            if platform not in platform_logs:
                filename = 'orchestrator' if platform is None else platform
                platform_logs[platform] = NamedTemporaryFile(prefix="%s-%s" %
                                                             (self.build_id, filename),
                                                             suffix=".log", mode='wb')
            platform_logs[platform].write((entry.line + '\n').encode('utf-8'))

        for platform, logfile in platform_logs.items():
            logfile.flush()
            filename = 'orchestrator' if platform is None else platform
            metadata = self.get_output_metadata(logfile.name, "%s.log" % filename)
            output.append(Output(file=logfile, metadata=metadata))

        return output

    def set_help(self, extra, worker_metadatas):
    def set_help(self, extra, worker_metadatas):
        all_annotations = [get_worker_build_info(self.workflow, platform).build.get_annotations()
                           for platform in worker_metadatas]
        help_known = ['help_file' in annotations for annotations in all_annotations]
        # Only set the 'help' key when any 'help_file' annotation is set
        if any(help_known):
            # See if any are not None
            for known, annotations in zip(help_known, all_annotations):
                if known:
                    help_file = json.loads(annotations['help_file'])
                    if help_file is not None:
                        extra['image']['help'] = help_file
                        break
            else:
                # They are all None
                extra['image']['help'] = None

    def set_media_types(self, extra, worker_metadatas):
        media_types = []
        for platform in worker_metadatas:
            annotations = get_worker_build_info(self.workflow, platform).build.get_annotations()
            if annotations.get('media-types'):
                media_types = json.loads(annotations['media-types'])
                break

        # Append media_types from pulp pull
        pulp_pull_results = self.workflow.exit_results.get(PLUGIN_PULP_PULL_KEY)
        if pulp_pull_results:
            media_types += pulp_pull_results

        if media_types:
            extra['image']['media_types'] = sorted(list(set(media_types)))

    def remove_unavailable_manifest_digests(self, worker_metadatas):
        try:
            available = get_manifests_in_pulp_repository(self.workflow)
        except KeyError:
            # pulp_sync didn't run
            return

        for platform, metadata in worker_metadatas.items():
            for output in metadata['output']:
                if output['type'] != 'docker-image':
                    continue

                unavailable = []
                repositories = output['extra']['docker']['repositories']
                for pullspec in repositories:
                    # Ignore by-tag pullspecs
                    if '@' not in pullspec:
                        continue

                    _, digest = pullspec.split('@', 1)
                    if digest not in available:
                        self.log.info("%s: %s not available, removing", platform, pullspec)
                        unavailable.append(pullspec)

                # Update the list in-place
                for pullspec in unavailable:
                    repositories.remove(pullspec)
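
        # Note (illustration; pullspec value assumed): a by-digest pullspec
        # such as 'crane.example.com/ns/img@sha256:0123...' is split at '@'
        # and dropped unless its digest appears in the Pulp repository.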

    def set_group_manifest_info(self, extra, worker_metadatas):
        version_release = None
        primary_images = get_primary_images(self.workflow)
        for image in primary_images:
            if '-' in image.tag:  # {version}-{release} only, and only one instance
                version_release = image.tag
                break

        assert version_release is not None, 'Unable to find version-release image'
        tags = [image.tag for image in primary_images]

        manifest_list_digests = self.workflow.postbuild_results.get(PLUGIN_GROUP_MANIFESTS_KEY)
        if manifest_list_digests:
            index = {}
            index['tags'] = tags
            repositories = self.workflow.build_result.annotations['repositories']['unique']
            repo = ImageName.parse(repositories[0]).to_str(registry=False, tag=False)
            # group_manifests added the registry, so this should be valid
            registries = self.workflow.push_conf.pulp_registries
            if not registries:
                registries = self.workflow.push_conf.all_registries
            for registry in registries:
                manifest_list_digest = manifest_list_digests[repo]
                pullspec = "{0}/{1}@{2}".format(registry.uri, repo, manifest_list_digest.default)
                index['pull'] = [pullspec]
                pullspec = "{0}/{1}:{2}".format(registry.uri, repo,
                                                version_release)
                index['pull'].append(pullspec)

                # Store each digest with according media type
                index['digests'] = {}
                for version, digest in manifest_list_digest.items():
                    if digest:
                        media_type = get_manifest_media_type(version)
                        index['digests'][media_type] = digest
                break
            extra['image']['index'] = index
        else:
            # group_manifests returns None if it didn't run, {} if group=False
            for platform in worker_metadatas:
                if platform == "x86_64":
                    for instance in worker_metadatas[platform]['output']:
                        if instance['type'] == 'docker-image':
                            # koji_upload, running in the worker, doesn't have the full tags
                            # so set them here
                            instance['extra']['docker']['tags'] = tags
                            repositories = []
                            for pullspec in instance['extra']['docker']['repositories']:
                                if '@' not in pullspec:
                                    image = ImageName.parse(pullspec)
                                    image.tag = version_release
                                    pullspec = image.to_str()

                                repositories.append(pullspec)

                            instance['extra']['docker']['repositories'] = repositories
                            self.log.debug("reset tags to so that docker is %s",
                                           instance['extra']['docker'])
                            annotations = get_worker_build_info(self.workflow, platform).\
                                build.get_annotations()
                            digests = {}
                            if 'digests' in annotations:
                                digests = get_digests_map_from_annotations(annotations['digests'])
                                instance['extra']['docker']['digests'] = digests

    def get_output_metadata(self, path, filename):
        """
        Describe a file by its metadata.

        :return: dict
        """

        checksums = get_checksums(path, ['md5'])
        metadata = {'filename': filename,
                    'filesize': os.path.getsize(path),
                    'checksum': checksums['md5sum'],
                    'checksum_type': 'md5'}

        return metadata
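
    # Illustrative return value (all numbers and checksums assumed):
    #   {'filename': 'x86_64.log', 'filesize': 1048576,
    #    'checksum': '5d41402abc4b2a76b9719d911017c592', 'checksum_type': 'md5'}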

    def get_build(self, metadata, worker_metadatas):
        start_time = int(atomic_reactor_start_time)

        labels = df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels

        component = get_preferred_label(labels, 'com.redhat.component')
        version = get_preferred_label(labels, 'version')
        release = get_preferred_label(labels, 'release')

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {'image': {'autorebuild': is_rebuild(self.workflow)}}
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = int(koji_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        parent_id = self.get_parent_image_koji_build_id()
        if parent_id is not None:
            try:
                parent_id = int(parent_id)
            except ValueError:
                self.log.exception("invalid koji parent id %r", parent_id)
            else:
                extra.setdefault('image', {})
                extra['image']['parent_build_id'] = parent_id

        flatpak_source_info = get_flatpak_source_info(self.workflow)
        if flatpak_source_info is not None:
            extra['image'].update(flatpak_source_info.koji_metadata())

        if koji_task_id:
            # Guard against a missing owner record; get_koji_task_owner() may
            # return the default (None) if the task is unknown.
            owner = get_koji_task_owner(self.session, koji_task_id, default=None)
            koji_task_owner = (owner or {}).get('name')
        else:
            koji_task_owner = None
        extra['submitter'] = self.session.getLoggedInUser()['name']

        resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent': resolve_comp_result['signing_intent'],
                'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
            }

        self.set_help(extra, worker_metadatas)
        self.set_media_types(extra, worker_metadatas)
        self.remove_unavailable_manifest_digests(worker_metadatas)
        self.set_group_manifest_info(extra, worker_metadatas)

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
            'owner': koji_task_owner,
        }

        return build

    def combine_metadata_fragments(self):
        def add_buildroot_id(output, buildroot_id):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        metadata_version = 0

        worker_metadatas = self.workflow.postbuild_results.get(PLUGIN_FETCH_WORKER_METADATA_KEY)
        build = self.get_build(metadata, worker_metadatas)
        buildroot = self.get_buildroot(worker_metadatas)
        buildroot_id = buildroot[0]['id']
        output = self.get_output(worker_metadatas)
        output_files = [add_log_type(add_buildroot_id(md, buildroot_id))
                        for md in self.get_logs()]
        output.extend([of.metadata for of in output_files])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': buildroot,
            'output': output,
        }
        return koji_metadata, output_files
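
    # Illustrative top-level shape of the returned metadata (content-generator
    # metadata_version 0; the nested values are assumptions):
    #   {'metadata_version': 0,
    #    'build': {'name': ..., 'version': ..., 'release': ..., 'extra': {...}},
    #    'buildroots': [{'id': 'x86_64-1', ...}],
    #    'output': [{'type': 'docker-image', ...}, {'type': 'log', ...}]}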

    def login(self):
        """
        Log in to koji

        :return: koji.ClientSession instance, logged in
        """

        # krbV python library throws an error if these are unicode
        auth_info = {
            "proxyuser": self.koji_proxy_user,
            "ssl_certs_dir": self.koji_ssl_certs,
            "krb_principal": str(self.koji_principal),
            "krb_keytab": str(self.koji_keytab)
        }
        return create_koji_session(str(self.kojihub), auth_info)
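
    # Illustrative auth_info values (assumed): for Kerberos,
    #   {'krb_principal': 'osbs/builder@EXAMPLE.COM',
    #    'krb_keytab': 'FILE:/etc/krb5.keytab', ...}
    # or, for SSL authentication, ssl_certs_dir pointing at a directory that
    # contains the 'cert', 'ca' and 'serverca' files.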

    def upload_file(self, session, output, serverdir):
        """
        Upload a file to koji

        :return: str, pathname on server
        """
        name = output.metadata['filename']
        self.log.debug("uploading %r to %r as %r",
                       output.file.name, serverdir, name)

        kwargs = {}
        if self.blocksize is not None:
            kwargs['blocksize'] = self.blocksize
            self.log.debug("using blocksize %d", self.blocksize)

        upload_logger = KojiUploadLogger(self.log)
        session.uploadWrapper(output.file.name, serverdir, name=name,
                              callback=upload_logger.callback, **kwargs)
        path = os.path.join(serverdir, name)
        self.log.debug("uploaded %r", path)
        return path

    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab) or
                (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not importing failed build to koji")
            return

        self.session = self.login()

        server_dir = get_koji_upload_dir(self.workflow)

        koji_metadata, output_files = self.combine_metadata_fragments()

        try:
            for output in output_files:
                if output.file:
                    self.upload_file(self.session, output, server_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        try:
            build_info = self.session.CGImport(koji_metadata, server_dir)
        except Exception:
            self.log.debug("metadata: %r", koji_metadata)
            raise

        # Older versions of CGImport do not return a value.
        build_id = build_info.get("id") if build_info else None

        self.log.debug("Build information: %s",
                       json.dumps(build_info, sort_keys=True, indent=4))

        return build_id
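
# A hedged sketch (not part of the original plugin) of the upload-then-import
# flow that run() performs above, reduced to its essentials. It assumes a
# logged-in koji.ClientSession `session` plus the `koji_metadata` dict and
# `output_files` list built by combine_metadata_fragments().
def upload_and_import(session, koji_metadata, output_files, serverdir):
    try:
        # Upload every file output into the shared server directory
        for output in output_files:
            if output.file:
                session.uploadWrapper(output.file.name, serverdir,
                                      name=output.metadata['filename'])
    finally:
        # The NamedTemporaryFile outputs are deleted on close
        for output in output_files:
            if output.file:
                output.file.close()

    # Import the content-generator metadata; older hubs return no value
    build_info = session.CGImport(koji_metadata, serverdir)
    return build_info.get('id') if build_info else None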
Example No. 46
0
class KojiPromotePlugin(ExitPlugin):
    """
    Promote this build to Koji

    Submits a successful build to Koji using the Content Generator API,
    https://fedoraproject.org/wiki/Koji/ContentGenerators

    Authentication is with Kerberos unless the koji_ssl_certs
    configuration parameter is given, in which case it should be a
    path at which 'cert', 'ca', and 'serverca' are the certificates
    for SSL authentication.

    If Kerberos is used for authentication, the default principal will
    be used (from the kernel keyring) unless both koji_keytab and
    koji_principal are specified. The koji_keytab parameter is a
    keytab name like 'type:name', and so can be used to specify a key
    in a Kubernetes secret by specifying 'FILE:/path/to/key'.

    If metadata_only is set, the 'docker save' image will not be
    uploaded, only the logs. The import will be marked as
    metadata-only.

    Runs as an exit plugin in order to capture logs from all other
    plugins.
    """

    key = PLUGIN_KOJI_PROMOTE_PLUGIN_KEY
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow, kojihub, url,
                 verify_ssl=True, use_auth=True,
                 koji_ssl_certs=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 metadata_only=False, blocksize=None,
                 target=None, poll_interval=5):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the 'docker save' image
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user

        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab

        self.metadata_only = metadata_only
        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.namespace = get_build_json().get('metadata', {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                                  use_auth=use_auth, verify_ssl=verify_ssl,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.pullspec_image = None

    def get_rpms(self):
        """
        Build a list of installed RPMs in the format required for the
        metadata.
        """

        tags = [
            'NAME',
            'VERSION',
            'RELEASE',
            'ARCH',
            'EPOCH',
            'SIGMD5',
            'SIGPGP:pgpsig',
            'SIGGPG:pgpsig',
        ]

        cmd = "/bin/rpm " + rpm_qf_args(tags)
        try:
            # py3
            (status, output) = subprocess.getstatusoutput(cmd)
        except AttributeError:
            # py2
            with open('/dev/null', 'r+') as devnull:
                p = subprocess.Popen(cmd,
                                     shell=True,
                                     stdin=devnull,
                                     stdout=subprocess.PIPE,
                                     stderr=devnull)

                (stdout, stderr) = p.communicate()
                status = p.wait()
                output = stdout.decode()

        if status != 0:
            self.log.debug("%s: stderr output: %s", cmd, stderr)
            raise RuntimeError("%s: exit code %s" % (cmd, status))

        return parse_rpm_output(output.splitlines(), tags)
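
    # For reference: judging from the equivalent inline construction in the
    # older variant of this plugin below, the generated command is roughly
    #   /bin/rpm -qa --qf '%{NAME};%{VERSION};%{RELEASE};%{ARCH};...\n'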

    def get_output_metadata(self, path, filename):
        """
        Describe a file by its metadata.

        :return: dict
        """

        checksums = get_checksums(path, ['md5'])
        metadata = {'filename': filename,
                    'filesize': os.path.getsize(path),
                    'checksum': checksums['md5sum'],
                    'checksum_type': 'md5'}

        if self.metadata_only:
            metadata['metadata_only'] = True

        return metadata

    def get_builder_image_id(self):
        """
        Find out the docker ID of the buildroot image we are in.
        """

        try:
            buildroot_tag = os.environ["OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE"]
        except KeyError:
            return ''

        try:
            pod = self.osbs.get_pod_for_build(self.build_id)
            all_images = pod.get_container_image_ids()
        except OsbsException as ex:
            self.log.error("unable to find image id: %r", ex)
            return buildroot_tag

        try:
            return all_images[buildroot_tag]
        except KeyError:
            self.log.error("Unable to determine buildroot image ID for %s",
                           buildroot_tag)
            return buildroot_tag

    def get_buildroot(self, build_id):
        """
        Build the buildroot entry of the metadata.

        :return: dict, partial metadata
        """

        docker_info = self.tasker.get_info()
        host_arch, docker_version = get_docker_architecture(self.tasker)

        buildroot = {
            'id': 1,
            'host': {
                'os': docker_info['OperatingSystem'],
                'arch': host_arch,
            },
            'content_generator': {
                'name': PROG,
                'version': atomic_reactor_version,
            },
            'container': {
                'type': 'docker',
                'arch': os.uname()[4],
            },
            'tools': [
                {
                    'name': tool['name'],
                    'version': tool['version'],
                }
                for tool in get_version_of_tools()] + [
                {
                    'name': 'docker',
                    'version': docker_version,
                },
            ],
            'components': self.get_rpms(),
            'extra': {
                'osbs': {
                    'build_id': build_id,
                    'builder_image_id': self.get_builder_image_id(),
                }
            },
        }

        return buildroot

    def get_logs(self):
        """
        Build the logs entry for the metadata 'output' section

        :return: list, Output instances
        """

        output = []

        # Collect logs from server
        try:
            logs = self.osbs.get_build_logs(self.build_id)
        except OsbsException as ex:
            self.log.error("unable to get build logs: %r", ex)
        else:
            # Deleted once closed
            logfile = NamedTemporaryFile(prefix=self.build_id,
                                         suffix=".log",
                                         mode='wb')
            try:
                logfile.write(logs)
            except (TypeError, UnicodeEncodeError):
                # Older osbs-client versions returned Unicode objects
                logfile.write(logs.encode('utf-8'))
            logfile.flush()
            metadata = self.get_output_metadata(logfile.name,
                                                "openshift-final.log")
            output.append(Output(file=logfile, metadata=metadata))

        docker_logs = NamedTemporaryFile(prefix="docker-%s" % self.build_id,
                                         suffix=".log",
                                         mode='wb')
        docker_logs.write("\n".join(self.workflow.build_result.logs).encode('utf-8'))
        docker_logs.flush()
        output.append(Output(file=docker_logs,
                             metadata=self.get_output_metadata(docker_logs.name,
                                                               "build.log")))
        return output

    def get_image_components(self):
        """
        Re-package the output of the rpmqa plugin into the format required
        for the metadata.
        """

        output = self.workflow.image_components
        if output is None:
            self.log.error("%s plugin did not run!",
                           PostBuildRPMqaPlugin.key)
            output = []

        return output

    def get_image_output(self, arch):
        """
        Create the output for the image

        This is the Koji Content Generator metadata, along with the
        'docker save' output to upload.

        For metadata-only builds, an empty file is used instead of the
        output of 'docker save'.

        :param arch: str, architecture for this output
        :return: tuple, (metadata dict, Output instance)

        """

        saved_image = self.workflow.exported_image_sequence[-1].get('path')
        image_name = get_image_upload_filename(self.workflow.exported_image_sequence[-1],
                                               self.workflow.builder.image_id,
                                               arch)
        if self.metadata_only:
            metadata = self.get_output_metadata(os.path.devnull, image_name)
            output = Output(file=None, metadata=metadata)
        else:
            metadata = self.get_output_metadata(saved_image, image_name)
            # Open in binary mode: the exported image is a tar archive
            output = Output(file=open(saved_image, 'rb'), metadata=metadata)

        return metadata, output

    def get_digests(self):
        """
        Returns a map of images to their digests
        """

        try:
            pulp = get_manifests_in_pulp_repository(self.workflow)
        except KeyError:
            pulp = None

        digests = {}  # repository -> digests
        for registry in self.workflow.push_conf.docker_registries:
            for image in self.workflow.tag_conf.images:
                image_str = image.to_str()
                if image_str in registry.digests:
                    image_digests = registry.digests[image_str]
                    if pulp is None:
                        digest_list = [image_digests.default]
                    else:
                        # If Pulp is enabled, only report digests that
                        # were synced into Pulp. This may not be all
                        # of them, depending on whether Pulp has
                        # schema 2 support.
                        digest_list = [digest for digest in (image_digests.v1,
                                                             image_digests.v2)
                                       if digest in pulp]

                    digests[image.to_str(registry=False)] = digest_list

        return digests
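
    # Illustrative return value (repository names and digests assumed):
    #   {'namespace/img:1.0-1': ['sha256:abc...'],
    #    'namespace/img:latest': ['sha256:abc...']}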

    def get_repositories(self, digests):
        """
        Build the repositories metadata

        :param digests: dict, image -> digests
        """
        if self.workflow.push_conf.pulp_registries:
            # If pulp was used, only report pulp images
            registries = self.workflow.push_conf.pulp_registries
        else:
            # Otherwise report all the images we pushed
            registries = self.workflow.push_conf.all_registries

        output_images = []
        for registry in registries:
            image = self.pullspec_image.copy()
            image.registry = registry.uri
            pullspec = image.to_str()

            output_images.append(pullspec)

            digest_list = digests.get(image.to_str(registry=False), ())
            for digest in digest_list:
                digest_pullspec = image.to_str(tag=False) + "@" + digest
                output_images.append(digest_pullspec)

        return output_images
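
        # e.g. (assumed values):
        #   ['registry.example.com/ns/img:1.0-1',
        #    'registry.example.com/ns/img@sha256:abc...']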

    def get_output(self, buildroot_id):
        """
        Build the 'output' section of the metadata.

        :return: list, Output instances
        """

        def add_buildroot_id(output):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        output_files = [add_log_type(add_buildroot_id(metadata))
                        for metadata in self.get_logs()]

        # Parent of squashed built image is base image
        image_id = self.workflow.builder.image_id
        parent_id = self.workflow.base_image_inspect['Id']

        # Read config from the registry using v2 schema 2 digest
        registries = self.workflow.push_conf.docker_registries
        if registries:
            config = copy.deepcopy(registries[0].config)
        else:
            config = {}

        # We don't need container_config section
        if config and 'container_config' in config:
            del config['container_config']

        digests = self.get_digests()
        repositories = self.get_repositories(digests)
        arch = os.uname()[4]
        tags = set(image.tag for image in self.workflow.tag_conf.primary_images)
        metadata, output = self.get_image_output(arch)
        metadata.update({
            'arch': arch,
            'type': 'docker-image',
            'components': self.get_image_components(),
            'extra': {
                'image': {
                    'arch': arch,
                },
                'docker': {
                    'id': image_id,
                    'parent_id': parent_id,
                    'repositories': repositories,
                    'layer_sizes': self.workflow.layer_sizes,
                    'tags': list(tags),
                    'config': config
                },
            },
        })
        annotations = self.workflow.build_result.annotations
        if annotations and 'digests' in annotations:
            digests = get_digests_map_from_annotations(annotations['digests'])
            metadata['extra']['docker']['digests'] = digests

        if not config:
            del metadata['extra']['docker']['config']

        # Add the 'docker save' image to the output
        image = add_buildroot_id(output)
        output_files.append(image)

        return output_files

    def get_build(self, metadata):
        start_time = int(atomic_reactor_start_time)

        labels = df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels

        component = get_preferred_label(labels, 'com.redhat.component')
        version = get_preferred_label(labels, 'version')
        release = get_preferred_label(labels, 'release')

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {'image': {'autorebuild': is_rebuild(self.workflow)}}

        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            try:
                extra['container_koji_task_id'] = int(koji_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", koji_task_id, exc_info=1)

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id

        # Append media_types from pulp pull
        pulp_pull_results = self.workflow.postbuild_results.get(PLUGIN_PULP_PULL_KEY)
        if pulp_pull_results:
            extra['image']['media_types'] = sorted(list(set(pulp_pull_results)))

        # Append parent_build_id from koji parent
        parent_results = self.workflow.prebuild_results.get(PLUGIN_KOJI_PARENT_KEY) or {}
        parent_id = parent_results.get('parent-image-koji-build', {}).get('id')
        if parent_id is not None:
            try:
                parent_id = int(parent_id)
            except ValueError:
                self.log.exception("invalid koji parent id %r", parent_id)
            else:
                extra['image']['parent_build_id'] = parent_id

        # Append isolated build flag
        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if (isinstance(help_result, dict) and
                'help_file' in help_result and
                'status' in help_result):
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                extra['image']['help'] = None
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                extra['image']['help'] = help_result['help_file']
            else:
                self.log.error("Unknown result from add_help plugin: %s", help_result)

        flatpak_source_info = get_flatpak_source_info(self.workflow)
        if flatpak_source_info is not None:
            extra['image'].update(flatpak_source_info.koji_metadata())

        resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent': resolve_comp_result['signing_intent'],
                'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
            }

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build

    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        for image in self.workflow.tag_conf.unique_images:
            self.pullspec_image = image
            break

        for image in self.workflow.tag_conf.primary_images:
            # dash at first/last position does not count
            if '-' in image.tag[1:-1]:
                self.pullspec_image = image
                break

        if not self.pullspec_image:
            raise RuntimeError('Unable to determine pullspec_image')

        metadata_version = 0

        build = self.get_build(metadata)
        buildroot = self.get_buildroot(build_id=self.build_id)
        output_files = self.get_output(buildroot['id'])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': [buildroot],
            'output': [output.metadata for output in output_files],
        }

        return koji_metadata, output_files

    def upload_file(self, session, output, serverdir):
        """
        Upload a file to koji

        :return: str, pathname on server
        """
        name = output.metadata['filename']
        self.log.debug("uploading %r to %r as %r",
                       output.file.name, serverdir, name)

        kwargs = {}
        if self.blocksize is not None:
            kwargs['blocksize'] = self.blocksize
            self.log.debug("using blocksize %d", self.blocksize)

        upload_logger = KojiUploadLogger(self.log)
        session.uploadWrapper(output.file.name, serverdir, name=name,
                              callback=upload_logger.callback, **kwargs)
        path = os.path.join(serverdir, name)
        self.log.debug("uploaded %r", path)
        return path

    @staticmethod
    def get_upload_server_dir():
        """
        Create a path name for uploading files to

        :return: str, path name expected to be unique
        """
        dir_prefix = 'koji-promote'
        random_chars = ''.join([random.choice(ascii_letters)
                                for _ in range(8)])
        unique_fragment = '%r.%s' % (time.time(), random_chars)
        return os.path.join(dir_prefix, unique_fragment)
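
        # e.g. (illustrative): 'koji-promote/1438421234.56.aBcDeFgH', i.e.
        # the repr() of the timestamp joined with eight random letters.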

    def login(self):
        """
        Log in to koji

        :return: koji.ClientSession instance, logged in
        """

        # krbV python library throws an error if these are unicode
        auth_info = {
            "proxyuser": self.koji_proxy_user,
            "ssl_certs_dir": self.koji_ssl_certs,
            "krb_principal": str(self.koji_principal),
            "krb_keytab": str(self.koji_keytab)
        }
        return create_koji_session(str(self.kojihub), auth_info)

    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab) or
                (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        koji_metadata, output_files = self.get_metadata()

        try:
            session = self.login()
            server_dir = self.get_upload_server_dir()
            for output in output_files:
                if output.file:
                    self.upload_file(session, output, server_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        try:
            build_info = session.CGImport(koji_metadata, server_dir)
        except Exception:
            self.log.debug("metadata: %r", koji_metadata)
            raise

        # Older versions of CGImport do not return a value.
        build_id = build_info.get("id") if build_info else None

        self.log.debug("Build information: %s",
                       json.dumps(build_info, sort_keys=True, indent=4))

        # If configured, koji_tag_build plugin will perform build tagging
        tag_later = are_plugins_in_order(self.workflow.exit_plugins_conf,
                                         PLUGIN_KOJI_PROMOTE_PLUGIN_KEY,
                                         PLUGIN_KOJI_TAG_BUILD_KEY)
        if not tag_later and build_id is not None and self.target is not None:
            tag_koji_build(session, build_id, self.target,
                           poll_interval=self.poll_interval)

        return build_id
Example No. 47
0
class KojiPromotePlugin(ExitPlugin):
    """
    Promote this build to Koji

    Submits a successful build to Koji using the Content Generator API,
    https://fedoraproject.org/wiki/Koji/ContentGenerators

    Authentication is with Kerberos unless the koji_ssl_certs
    configuration parameter is given, in which case it should be a
    path at which 'cert', 'ca', and 'serverca' are the certificates
    for SSL authentication.

    If Kerberos is used for authentication, the default principal will
    be used (from the kernel keyring) unless both koji_keytab and
    koji_principal are specified. The koji_keytab parameter is a
    keytab name like 'type:name', and so can be used to specify a key
    in a Kubernetes secret by specifying 'FILE:/path/to/key'.

    If metadata_only is set, the v1 image will not be uploaded, only
    the logs. The import will be marked as metadata-only.

    Runs as an exit plugin in order to capture logs from all other
    plugins.
    """

    key = "koji_promote"
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow, kojihub, url,
                 verify_ssl=True, use_auth=True,
                 koji_ssl_certs=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 metadata_only=False):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the v1 image
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user
        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab
        self.metadata_only = metadata_only

        osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                                  use_auth=use_auth, verify_ssl=verify_ssl)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.namespace = None

    @staticmethod
    def parse_rpm_output(output, tags, separator=';'):
        """
        Parse output of the rpm query.

        :param output: list, decoded output (str) from the rpm subprocess
        :param tags: list, str fields used for query output
        :return: list, dicts describing each rpm package
        """

        def field(tag):
            """
            Get a field value by name
            """
            try:
                value = fields[tags.index(tag)]
            except ValueError:
                return None

            if value == '(none)':
                return None

            return value

        components = []
        sigmarker = 'Key ID '
        for rpm in output:
            fields = rpm.rstrip('\n').split(separator)
            if len(fields) < len(tags):
                continue

            signature = field('SIGPGP:pgpsig') or field('SIGGPG:pgpsig')
            if signature:
                parts = signature.split(sigmarker, 1)
                if len(parts) > 1:
                    signature = parts[1]

            component_rpm = {
                'type': 'rpm',
                'name': field('NAME'),
                'version': field('VERSION'),
                'release': field('RELEASE'),
                'arch': field('ARCH'),
                'sigmd5': field('SIGMD5'),
                'signature': signature,
            }

            # Special handling for epoch as it must be an integer or None
            epoch = field('EPOCH')
            if epoch is not None:
                epoch = int(epoch)

            component_rpm['epoch'] = epoch

            if component_rpm['name'] != 'gpg-pubkey':
                components.append(component_rpm)

        return components
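
    # Illustrative parse (all values assumed): with the tags used by
    # get_rpms() and separator ';', a line such as
    #   bash;4.2.46;34.el7;x86_64;(none);<sigmd5>;RSA/SHA256, ... Key ID 199e2f91fd431d51;(none)
    # yields
    #   {'type': 'rpm', 'name': 'bash', 'version': '4.2.46',
    #    'release': '34.el7', 'arch': 'x86_64', 'epoch': None,
    #    'sigmd5': '<sigmd5>', 'signature': '199e2f91fd431d51'}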

    def get_rpms(self):
        """
        Build a list of installed RPMs in the format required for the
        metadata.
        """

        tags = [
            'NAME',
            'VERSION',
            'RELEASE',
            'ARCH',
            'EPOCH',
            'SIGMD5',
            'SIGPGP:pgpsig',
            'SIGGPG:pgpsig',
        ]

        sep = ';'
        fmt = sep.join(["%%{%s}" % tag for tag in tags])
        cmd = "/bin/rpm -qa --qf '{0}\n'".format(fmt)
        try:
            # py3
            (status, output) = subprocess.getstatusoutput(cmd)
        except AttributeError:
            # py2
            with open('/dev/null', 'r+') as devnull:
                p = subprocess.Popen(cmd,
                                     shell=True,
                                     stdin=devnull,
                                     stdout=subprocess.PIPE,
                                     stderr=devnull)

                (stdout, stderr) = p.communicate()
                status = p.wait()
                output = stdout.decode()

        if status != 0:
            self.log.debug("%s: stderr output: %s", cmd, stderr)
            raise RuntimeError("%s: exit code %s" % (cmd, status))

        return self.parse_rpm_output(output.splitlines(), tags, separator=sep)

    def get_output_metadata(self, path, filename):
        """
        Describe a file by its metadata.

        :return: dict
        """

        checksums = get_checksums(path, ['md5'])
        metadata = {'filename': filename,
                    'filesize': os.path.getsize(path),
                    'checksum': checksums['md5sum'],
                    'checksum_type': 'md5'}

        if self.metadata_only:
            metadata['metadata_only'] = True

        return metadata

    def get_builder_image_id(self):
        """
        Find out the docker ID of the buildroot image we are in.
        """

        buildroot_tag = os.environ["OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE"]
        kwargs = {}
        if self.namespace is not None:
            kwargs['namespace'] = self.namespace
        pod = self.osbs.get_pod_for_build(self.build_id, **kwargs)
        all_images = pod.get_container_image_ids()

        try:
            return all_images[buildroot_tag]
        except KeyError:
            self.log.error("Unable to determine buildroot image ID for %s",
                           buildroot_tag)
            return buildroot_tag

    def get_buildroot(self, build_id):
        """
        Build the buildroot entry of the metadata.

        :return: dict, partial metadata
        """

        docker_version = self.tasker.get_version()
        docker_info = self.tasker.get_info()
        host_arch = docker_version['Arch']
        if host_arch == 'amd64':
            host_arch = 'x86_64'

        buildroot = {
            'id': 1,
            'host': {
                'os': docker_info['OperatingSystem'],
                'arch': host_arch,
            },
            'content_generator': {
                'name': PROG,
                'version': atomic_reactor_version,
            },
            'container': {
                'type': 'docker',
                'arch': os.uname()[4],
            },
            'tools': [
                {
                    'name': tool['name'],
                    'version': tool['version'],
                }
                for tool in get_version_of_tools()] + [
                {
                    'name': 'docker',
                    'version': docker_version['Version'],
                },
            ],
            'components': self.get_rpms(),
            'extra': {
                'osbs': {
                    'build_id': build_id,
                    'builder_image_id': self.get_builder_image_id(),
                }
            },
        }

        return buildroot

    def get_logs(self):
        """
        Build the logs entry for the metadata 'output' section

        :return: list, Output instances
        """

        # Collect logs from server
        kwargs = {}
        if self.namespace is not None:
            kwargs['namespace'] = self.namespace
        logs = self.osbs.get_build_logs(self.build_id, **kwargs)

        # Deleted once closed
        logfile = NamedTemporaryFile(prefix=self.build_id,
                                     suffix=".log",
                                     mode='w')
        logfile.write(logs)
        logfile.flush()

        docker_logs = NamedTemporaryFile(prefix="docker-%s" % self.build_id,
                                         suffix=".log",
                                         mode='w')
        docker_logs.write("\n".join(self.workflow.build_logs))
        docker_logs.flush()

        return [Output(file=docker_logs,
                       metadata=self.get_output_metadata(docker_logs.name,
                                                         "build.log")),
                Output(file=logfile,
                       metadata=self.get_output_metadata(logfile.name,
                                                         "openshift-final.log"))]

    def get_image_components(self):
        """
        Re-package the output of the rpmqa plugin into the format required
        for the metadata.
        """

        try:
            output = self.workflow.postbuild_results[PostBuildRPMqaPlugin.key]
        except KeyError:
            self.log.error("%s plugin did not run!",
                           PostBuildRPMqaPlugin.key)
            return []

        return self.parse_rpm_output(output, PostBuildRPMqaPlugin.rpm_tags,
                                     separator=',')

    def get_image_output(self):
        """
        Create the output for the image

        For v1, this is the v1 image. For v2, this is the v2 metadata
        with the checksum of an empty file, and no actual upload.

        :return: tuple, (metadata dict, Output instance)
        """

        image_id = self.workflow.builder.image_id
        v1_image = self.workflow.exported_image_sequence[-1].get('path')
        ext = v1_image.split('.', 1)[1]
        if self.metadata_only:
            v2_image_name = 'docker-v2-image-{0}.{1}'.format(image_id, ext)
            metadata = self.get_output_metadata(os.path.devnull, v2_image_name)
            output = Output(file=None, metadata=metadata)
        else:
            v1_image_name = 'docker-v1-image-{0}.{1}'.format(image_id, ext)
            metadata = self.get_output_metadata(v1_image, v1_image_name)
            # Open in binary mode: the exported v1 image is a tar archive
            output = Output(file=open(v1_image, 'rb'), metadata=metadata)

        return metadata, output

    def get_output_images(self):
        if self.workflow.push_conf.pulp_registries:
            # If pulp was used, only report pulp images
            registries = self.workflow.push_conf.pulp_registries
        else:
            # Otherwise report all the images we pushed
            registries = self.workflow.push_conf.all_registries

        output_images = []
        for registry in registries:
            for image in (self.workflow.tag_conf.primary_images +
                          self.workflow.tag_conf.unique_images):
                registry_image = image.copy()
                registry_image.registry = registry.uri
                if registry_image not in output_images:
                    output_images.append(registry_image)

        return output_images

    def get_output(self, buildroot_id):
        """
        Build the 'output' section of the metadata.

        :return: list, Output instances
        """

        def add_buildroot_id(output):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        output_files = [add_log_type(add_buildroot_id(metadata))
                        for metadata in self.get_logs()]

        # Parent of squashed built image is base image
        image_id = self.workflow.builder.image_id
        parent_id = self.workflow.base_image_inspect['Id']
        output_images = self.get_output_images()
        repositories = [image.to_str() for image in output_images
                        if image.tag != 'latest']
        arch = os.uname()[4]
        metadata, output = self.get_image_output()
        metadata.update({
            'arch': arch,
            'type': 'docker-image',
            'components': self.get_image_components(),
            'extra': {
                'image': {
                    'arch': arch,
                },
                'docker': {
                    'id': image_id,
                    'parent_id': parent_id,
                    'repositories': repositories,
                },
            },
        })

        # Add the v1 image (or v2 metadata) to the output
        image = add_buildroot_id(output)
        output_files.append(image)

        return output_files

    def get_build(self, metadata):
        build_start_time = metadata["creationTimestamp"]
        try:
            # Decode UTC RFC3339 date with no fractional seconds
            # (the format we expect)
            start_time_struct = time.strptime(build_start_time,
                                              '%Y-%m-%dT%H:%M:%SZ')
            start_time = int(time.mktime(start_time_struct))
        except ValueError:
            self.log.error("Invalid time format (%s)", build_start_time)
            raise
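        # e.g. (assumed value) '2015-08-24T10:41:56Z' parses into a
        # struct_time which mktime() converts to epoch seconds; note that
        # mktime() assumes local time, so this is only exact under UTC.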

        name = None
        version = None
        release = None
        for image_name in self.workflow.tag_conf.primary_images:
            if '-' in image_name.tag:
                name = image_name.repo
                version, release = image_name.tag.split('-', 1)

        if name is None or version is None or release is None:
            raise RuntimeError('Unable to determine name-version-release')

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        build = {
            'name': name,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': {
                'image': {},
            },
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build

    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. "
                           "Probably not running in build container.")
            raise

        try:
            metadata = build_json["metadata"]
            self.build_id = metadata["name"]
            self.namespace = metadata.get("namespace")
        except KeyError:
            self.log.error("No build metadata")
            raise

        metadata_version = 0

        build = self.get_build(metadata)
        buildroot = self.get_buildroot(build_id=self.build_id)
        output_files = self.get_output(buildroot['id'])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': [buildroot],
            'output': [output.metadata for output in output_files],
        }

        return koji_metadata, output_files

    def upload_file(self, session, output, serverdir):
        """
        Upload a file to koji

        :return: str, pathname on server
        """
        name = output.metadata['filename']
        self.log.debug("uploading %r to %r as %r",
                       output.file.name, serverdir, name)
        session.uploadWrapper(output.file.name, serverdir, name=name)
        path = os.path.join(serverdir, name)
        self.log.debug("uploaded %r", path)
        return path

    @staticmethod
    def get_upload_server_dir():
        """
        Create a path name for uploading files to

        :return: str, path name expected to be unique
        """
        dir_prefix = 'koji-promote'
        random_chars = ''.join([random.choice(ascii_letters)
                                for _ in range(8)])
        unique_fragment = '%r.%s' % (time.time(), random_chars)
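        # e.g. 'koji-promote/1431012345.67.aBcDeFgH' (illustrative value)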
        return os.path.join(dir_prefix, unique_fragment)

    def login(self):
        """
        Log in to koji

        :return: koji.ClientSession instance, logged in
        """
        session = koji.ClientSession(self.kojihub)
        kwargs = {}
        if self.koji_proxy_user:
            kwargs['proxyuser'] = self.koji_proxy_user

        if self.koji_ssl_certs:
            # Use certificates
            self.log.info("Using SSL certificates for Koji authentication")
            session.ssl_login(os.path.join(self.koji_ssl_certs, 'cert'),
                              os.path.join(self.koji_ssl_certs, 'ca'),
                              os.path.join(self.koji_ssl_certs, 'serverca'),
                              **kwargs)
        else:
            # Use Kerberos
            self.log.info("Using Kerberos for Koji authentication")
            if self.koji_principal and self.koji_keytab:
                kwargs['principal'] = self.koji_principal
                kwargs['keytab'] = self.koji_keytab

            session.krb_login(**kwargs)

        return session

    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab) or
                (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        if not is_rebuild(self.workflow):
            self.log.info("Not promoting to koji: not a rebuild")
            return

        koji_metadata, output_files = self.get_metadata()

        try:
            session = self.login()
            server_dir = self.get_upload_server_dir()
            for output in output_files:
                if output.file:
                    self.upload_file(session, output, server_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        session.CGImport(koji_metadata, server_dir)

        self.log.debug("Submitted with metadata: %s",
                       json.dumps(koji_metadata, sort_keys=True, indent=4))
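
A hedged sketch of the Content Generator metadata envelope that get_metadata()
assembles and CGImport() receives. All values below are illustrative, not
taken from a real build:

koji_metadata = {
    'metadata_version': 0,
    'build': {
        'name': 'example-image',
        'version': '1.0',
        'release': '2',
        'source': 'git://example.com/example.git#abcdef0',
        'start_time': 1431012345,
        'end_time': 1431012999,
        'extra': {'image': {}},
    },
    'buildroots': [{'id': 1}],      # full entry built by get_buildroot()
    'output': [{'buildroot_id': 1,  # entries built by get_output()
                'type': 'log',
                'arch': 'noarch',
                'filename': 'openshift-final.log'}],
}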
    def run(self):
        metadata = get_build_json().get("metadata", {})

        try:
            build_id = metadata["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)

        # Initial setup uses host-based auth: Apache is configured to accept
        # everything from a specific IP and to set a specific X-Remote-User
        # header for such requests.
        # FIXME: remove `openshift_uri` once osbs-client is released
        osbs_conf = Configuration(conf_file=None, openshift_uri=self.url, openshift_url=self.url,
                                  use_auth=self.use_auth, verify_ssl=self.verify_ssl,
                                  namespace=metadata.get('namespace', None))
        osbs = OSBS(osbs_conf, osbs_conf)

        try:
            commit_id = self.workflow.source.commit_id
        except AttributeError:
            commit_id = ""

        base_image = self.workflow.builder.base_image
        if base_image is not None:
            base_image_name = base_image.to_str()
            try:
                base_image_id = self.workflow.base_image_inspect['Id']
            except KeyError:
                base_image_id = ""
        else:
            base_image_name = ""
            base_image_id = ""

        try:
            with open(self.workflow.builder.df_path) as f:
                dockerfile_contents = f.read()
        except AttributeError:
            dockerfile_contents = ""

        annotations = {
            "dockerfile": dockerfile_contents,

            # We no longer store the 'docker build' logs as an annotation
            "logs": '',

            # We no longer store the rpm packages as an annotation
            "rpm-packages": '',

            "repositories": json.dumps(self.get_repositories()),
            "commit_id": commit_id,
            "base-image-id": base_image_id,
            "base-image-name": base_image_name,
            "image-id": self.workflow.builder.image_id or '',
            "digests": json.dumps(self.get_pullspecs(self.get_digests())),
            "plugins-metadata": json.dumps(self.get_plugin_metadata())
        }

        help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
        if isinstance(help_result, dict) and 'help_file' in help_result and 'status' in help_result:
            if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
                annotations['help_file'] = json.dumps(None)
            elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
                annotations['help_file'] = json.dumps(help_result['help_file'])
            else:
                self.log.error("Unknown result from add_help plugin: %s", help_result)

        pulp_push_results = self.workflow.postbuild_results.get(PLUGIN_PULP_PUSH_KEY)
        if pulp_push_results:
            top_layer, _ = pulp_push_results
            annotations['v1-image-id'] = top_layer

        media_types = []
        if pulp_push_results:
            media_types += [MEDIA_TYPE_DOCKER_V1]

        # pulp_pull may run on worker as a postbuild plugin or on orchestrator as an exit plugin
        pulp_pull_results = (self.workflow.postbuild_results.get(PulpPullPlugin.key) or
                             self.workflow.exit_results.get(PulpPullPlugin.key))
        if isinstance(pulp_pull_results, Exception):
            pulp_pull_results = None

        if pulp_pull_results:
            media_types += pulp_pull_results

        if media_types:
            annotations['media-types'] = json.dumps(sorted(list(set(media_types))))

        tar_path = tar_size = tar_md5sum = tar_sha256sum = None
        if len(self.workflow.exported_image_sequence) > 0:
            tar_path = self.workflow.exported_image_sequence[-1].get("path")
            tar_size = self.workflow.exported_image_sequence[-1].get("size")
            tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
            tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
        # OpenShift does not appear to handle None values (null in JSON)
        if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
                tar_path is not None:
            annotations["tar_metadata"] = json.dumps({
                "size": tar_size,
                "md5sum": tar_md5sum,
                "sha256sum": tar_sha256sum,
                "filename": os.path.basename(tar_path),
            })

        annotations.update(self.get_config_map())

        self.apply_build_result_annotations(annotations)

        # For arrangement version 4 onwards (where group_manifests
        # runs in the orchestrator build), restore the repositories
        # metadata which orchestrate_build adjusted.
        if PLUGIN_GROUP_MANIFESTS_KEY in self.workflow.postbuild_results:
            annotations['repositories'] = json.dumps(self.get_repositories())
        try:
            osbs.set_annotations_on_build(build_id, annotations)
        except OsbsResponseException:
            self.log.debug("annotations: %r", annotations)
            raise

        labels = self.make_labels()
        if labels:
            try:
                osbs.update_labels_on_build(build_id, labels)
            except OsbsResponseException:
                self.log.debug("labels: %r", labels)
                raise

        return {"annotations": annotations, "labels": labels}
Ejemplo n.º 49
0
class KojiPromotePlugin(ExitPlugin):
    """
    Promote this build to Koji

    Submits a successful build to Koji using the Content Generator API,
    https://fedoraproject.org/wiki/Koji/ContentGenerators

    Authentication is with Kerberos unless the koji_ssl_certs
    configuration parameter is given, in which case it should be a
    path at which 'cert', 'ca', and 'serverca' are the certificates
    for SSL authentication.

    If Kerberos is used for authentication, the default principal will
    be used (from the kernel keyring) unless both koji_keytab and
    koji_principal are specified. The koji_keytab parameter is a
    keytab name like 'type:name', and so can be used to specify a key
    in a Kubernetes secret by specifying 'FILE:/path/to/key'.

    If metadata_only is set, the 'docker save' image will not be
    uploaded, only the logs. The import will be marked as
    metadata-only.

    Runs as an exit plugin in order to capture logs from all other
    plugins.
    """

    key = "koji_promote"
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow, kojihub, url,
                 verify_ssl=True, use_auth=True,
                 koji_ssl_certs=None, koji_proxy_user=None,
                 koji_principal=None, koji_keytab=None,
                 metadata_only=False, blocksize=None,
                 target=None, poll_interval=5):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param kojihub: string, koji hub (xmlrpc)
        :param url: string, URL for OSv3 instance
        :param verify_ssl: bool, verify OSv3 SSL certificate?
        :param use_auth: bool, initiate authentication with OSv3?
        :param koji_ssl_certs: str, path to 'cert', 'ca', 'serverca'
        :param koji_proxy_user: str, user to log in as (requires hub config)
        :param koji_principal: str, Kerberos principal (must specify keytab)
        :param koji_keytab: str, keytab name (must specify principal)
        :param metadata_only: bool, whether to omit the 'docker save' image
        :param blocksize: int, blocksize to use for uploading files
        :param target: str, koji target
        :param poll_interval: int, seconds between Koji task status requests
        """
        super(KojiPromotePlugin, self).__init__(tasker, workflow)

        self.kojihub = kojihub
        self.koji_ssl_certs = koji_ssl_certs
        self.koji_proxy_user = koji_proxy_user
        self.koji_principal = koji_principal
        self.koji_keytab = koji_keytab
        self.metadata_only = metadata_only
        self.blocksize = blocksize
        self.target = target
        self.poll_interval = poll_interval

        self.namespace = get_build_json().get('metadata', {}).get('namespace', None)
        osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                                  use_auth=use_auth, verify_ssl=verify_ssl,
                                  namespace=self.namespace)
        self.osbs = OSBS(osbs_conf, osbs_conf)
        self.build_id = None
        self.nvr_image = None

    @staticmethod
    def parse_rpm_output(output, tags, separator=';'):
        """
        Parse the output of the rpm query.

        :param output: list, decoded output (str) from the rpm subprocess
        :param tags: list, str fields used for query output
        :param separator: str, field separator used in each output line
        :return: list, dicts describing each rpm package
        """

        def field(tag):
            """
            Get a field value by name
            """
            try:
                value = fields[tags.index(tag)]
            except ValueError:
                return None

            if value == '(none)':
                return None

            return value

        components = []
        sigmarker = 'Key ID '
        for rpm in output:
            fields = rpm.rstrip('\n').split(separator)
            if len(fields) < len(tags):
                continue

            signature = field('SIGPGP:pgpsig') or field('SIGGPG:pgpsig')
            if signature:
                parts = signature.split(sigmarker, 1)
                if len(parts) > 1:
                    signature = parts[1]

            component_rpm = {
                'type': 'rpm',
                'name': field('NAME'),
                'version': field('VERSION'),
                'release': field('RELEASE'),
                'arch': field('ARCH'),
                'sigmd5': field('SIGMD5'),
                'signature': signature,
            }

            # Special handling for epoch as it must be an integer or None
            epoch = field('EPOCH')
            if epoch is not None:
                epoch = int(epoch)

            component_rpm['epoch'] = epoch

            if component_rpm['name'] != 'gpg-pubkey':
                components.append(component_rpm)

        return components
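
    # A worked example for the parser above (illustrative line, using the tag
    # order from get_rpms() below):
    #
    #   >>> tags = ['NAME', 'VERSION', 'RELEASE', 'ARCH', 'EPOCH',
    #   ...         'SIGMD5', 'SIGPGP:pgpsig', 'SIGGPG:pgpsig']
    #   >>> line = ('bash;4.3.42;1.fc23;x86_64;(none);d41d8cd98f00b204;'
    #   ...         'RSA/SHA256, Tue 01 Mar 2016, Key ID 873529b8e5f8f8d3;'
    #   ...         '(none)')
    #   >>> KojiPromotePlugin.parse_rpm_output([line], tags)
    #   [{'type': 'rpm', 'name': 'bash', 'version': '4.3.42',
    #     'release': '1.fc23', 'arch': 'x86_64', 'sigmd5': 'd41d8cd98f00b204',
    #     'signature': '873529b8e5f8f8d3', 'epoch': None}]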

    def get_rpms(self):
        """
        Build a list of installed RPMs in the format required for the
        metadata.
        """

        tags = [
            'NAME',
            'VERSION',
            'RELEASE',
            'ARCH',
            'EPOCH',
            'SIGMD5',
            'SIGPGP:pgpsig',
            'SIGGPG:pgpsig',
        ]

        sep = ';'
        fmt = sep.join(["%%{%s}" % tag for tag in tags])
        cmd = "/bin/rpm -qa --qf '{0}\n'".format(fmt)
        try:
            # py3
            (status, output) = subprocess.getstatusoutput(cmd)
        except AttributeError:
            # py2
            with open('/dev/null', 'r+') as devnull:
                p = subprocess.Popen(cmd,
                                     shell=True,
                                     stdin=devnull,
                                     stdout=subprocess.PIPE,
                                     stderr=devnull)

                (stdout, stderr) = p.communicate()
                status = p.wait()
                output = stdout.decode()

        if status != 0:
            # 'stderr' was redirected to /dev/null (py2 path) or never
            # captured at all (py3 path), so log the command output instead
            self.log.debug("%s: output: %s", cmd, output)
            raise RuntimeError("%s: exit code %s" % (cmd, status))

        return self.parse_rpm_output(output.splitlines(), tags, separator=sep)

    def get_output_metadata(self, path, filename):
        """
        Describe a file by its metadata.

        :return: dict
        """

        checksums = get_checksums(path, ['md5'])
        metadata = {'filename': filename,
                    'filesize': os.path.getsize(path),
                    'checksum': checksums['md5sum'],
                    'checksum_type': 'md5'}

        if self.metadata_only:
            metadata['metadata_only'] = True

        return metadata

    def get_builder_image_id(self):
        """
        Find out the docker ID of the buildroot image we are in.
        """

        try:
            buildroot_tag = os.environ["OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE"]
        except KeyError:
            return ''

        try:
            pod = self.osbs.get_pod_for_build(self.build_id)
            all_images = pod.get_container_image_ids()
        except OsbsException as ex:
            self.log.error("unable to find image id: %r", ex)
            return buildroot_tag

        try:
            return all_images[buildroot_tag]
        except KeyError:
            self.log.error("Unable to determine buildroot image ID for %s",
                           buildroot_tag)
            return buildroot_tag

    def get_buildroot(self, build_id):
        """
        Build the buildroot entry of the metadata.

        :return: dict, partial metadata
        """

        docker_version = self.tasker.get_version()
        docker_info = self.tasker.get_info()
        host_arch = docker_version['Arch']
        if host_arch == 'amd64':
            host_arch = 'x86_64'

        buildroot = {
            'id': 1,
            'host': {
                'os': docker_info['OperatingSystem'],
                'arch': host_arch,
            },
            'content_generator': {
                'name': PROG,
                'version': atomic_reactor_version,
            },
            'container': {
                'type': 'docker',
                'arch': os.uname()[4],
            },
            'tools': [
                {
                    'name': tool['name'],
                    'version': tool['version'],
                }
                for tool in get_version_of_tools()] + [
                {
                    'name': 'docker',
                    'version': docker_version['Version'],
                },
            ],
            'components': self.get_rpms(),
            'extra': {
                'osbs': {
                    'build_id': build_id,
                    'builder_image_id': self.get_builder_image_id(),
                }
            },
        }

        return buildroot

    def get_logs(self):
        """
        Build the logs entry for the metadata 'output' section

        :return: list, Output instances
        """

        output = []

        # Collect logs from server
        try:
            logs = self.osbs.get_build_logs(self.build_id)
        except OsbsException as ex:
            self.log.error("unable to get build logs: %r", ex)
        else:
            # Deleted once closed
            logfile = NamedTemporaryFile(prefix=self.build_id,
                                         suffix=".log",
                                         mode='w')
            logfile.write(logs)
            logfile.flush()
            metadata = self.get_output_metadata(logfile.name,
                                                "openshift-final.log")
            output.append(Output(file=logfile, metadata=metadata))

        docker_logs = NamedTemporaryFile(prefix="docker-%s" % self.build_id,
                                         suffix=".log",
                                         mode='w')
        docker_logs.write("\n".join(self.workflow.build_logs))
        docker_logs.flush()
        output.append(Output(file=docker_logs,
                             metadata=self.get_output_metadata(docker_logs.name,
                                                               "build.log")))
        return output

    def get_image_components(self):
        """
        Re-package the output of the rpmqa plugin into the format required
        for the metadata.
        """

        try:
            output = self.workflow.postbuild_results[PostBuildRPMqaPlugin.key]
        except KeyError:
            self.log.error("%s plugin did not run!",
                           PostBuildRPMqaPlugin.key)
            return []

        return self.parse_rpm_output(output, PostBuildRPMqaPlugin.rpm_tags,
                                     separator=',')

    def get_image_output(self, arch):
        """
        Create the output for the image

        This is the Koji Content Generator metadata, along with the
        'docker save' output to upload.

        For metadata-only builds, an empty file is used instead of the
        output of 'docker save'.

        :param arch: str, architecture for this output
        :return: tuple, (metadata dict, Output instance)

        """

        image_id = self.workflow.builder.image_id
        saved_image = self.workflow.exported_image_sequence[-1].get('path')
        ext = saved_image.split('.', 1)[1]
        name_fmt = 'docker-image-{id}.{arch}.{ext}'
        image_name = name_fmt.format(id=image_id, arch=arch, ext=ext)
        if self.metadata_only:
            metadata = self.get_output_metadata(os.path.devnull, image_name)
            output = Output(file=None, metadata=metadata)
        else:
            metadata = self.get_output_metadata(saved_image, image_name)
            output = Output(file=open(saved_image), metadata=metadata)

        return metadata, output

    def get_digests(self):
        """
        Returns a map of repositories to digests
        """

        digests = {}  # repository -> digest
        for registry in self.workflow.push_conf.docker_registries:
            for image in self.workflow.tag_conf.images:
                image_str = image.to_str()
                if image_str in registry.digests:
                    digest = registry.digests[image_str]
                    digests[image.to_str(registry=False)] = digest

        return digests

    def get_repositories(self, digests):
        """
        Build the repositories metadata

        :param digests: dict, repository -> digest
        """
        if self.workflow.push_conf.pulp_registries:
            # If pulp was used, only report pulp images
            registries = self.workflow.push_conf.pulp_registries
        else:
            # Otherwise report all the images we pushed
            registries = self.workflow.push_conf.all_registries

        output_images = []
        for registry in registries:
            image = self.nvr_image.copy()
            image.registry = registry.uri
            pullspec = image.to_str()

            output_images.append(pullspec)

            digest = digests.get(image.to_str(registry=False))
            if digest:
                digest_pullspec = image.to_str(tag=False) + "@" + digest
                output_images.append(digest_pullspec)

        return output_images
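
    # Illustrative output for the method above (assumed registry and image
    # names): a tag pullspec plus, when a digest is known, a digest pullspec:
    #   ['registry.example.com/example-image:1.0-2',
    #    'registry.example.com/example-image@sha256:<digest>']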

    def get_output(self, buildroot_id):
        """
        Build the 'output' section of the metadata.

        :return: list, Output instances
        """

        def add_buildroot_id(output):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        output_files = [add_log_type(add_buildroot_id(metadata))
                        for metadata in self.get_logs()]

        # Parent of squashed built image is base image
        image_id = self.workflow.builder.image_id
        parent_id = self.workflow.base_image_inspect['Id']
        digests = self.get_digests()
        repositories = self.get_repositories(digests)
        arch = os.uname()[4]
        metadata, output = self.get_image_output(arch)
        metadata.update({
            'arch': arch,
            'type': 'docker-image',
            'components': self.get_image_components(),
            'extra': {
                'image': {
                    'arch': arch,
                },
                'docker': {
                    'id': image_id,
                    'parent_id': parent_id,
                    'repositories': repositories,
                },
            },
        })

        # Add the 'docker save' image to the output
        image = add_buildroot_id(output)
        output_files.append(image)

        return output_files

    def get_build(self, metadata):
        start_time = int(atomic_reactor_start_time)
        labels = DockerfileParser(self.workflow.builder.df_path).labels
        component = get_preferred_label(labels, 'com.redhat.component')
        version = get_preferred_label(labels, 'version')
        release = get_preferred_label(labels, 'release')

        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')

        extra = {'image': {}}
        koji_task_id = metadata.get('labels', {}).get('koji-task-id')
        if koji_task_id is not None:
            self.log.info("build configuration created by Koji Task ID %s",
                          koji_task_id)
            extra['container_koji_task_id'] = koji_task_id

        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                extra['filesystem_koji_task_id'] = str(task_id)

        build = {
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
            'start_time': start_time,
            'end_time': int(time.time()),
            'extra': extra,
        }

        if self.metadata_only:
            build['metadata_only'] = True

        return build

    def get_metadata(self):
        """
        Build the metadata needed for importing the build

        :return: tuple, the metadata and the list of Output instances
        """
        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        for image in self.workflow.tag_conf.primary_images:
            # a dash at the first/last position does not count
            if '-' in image.tag[1:-1]:
                self.nvr_image = image
                break
        else:
            raise RuntimeError('Unable to determine name:version-release')

        metadata_version = 0

        build = self.get_build(metadata)
        buildroot = self.get_buildroot(build_id=self.build_id)
        output_files = self.get_output(buildroot['id'])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': [buildroot],
            'output': [output.metadata for output in output_files],
        }

        return koji_metadata, output_files

    def upload_file(self, session, output, serverdir):
        """
        Upload a file to koji

        :return: str, pathname on server
        """
        name = output.metadata['filename']
        self.log.debug("uploading %r to %r as %r",
                       output.file.name, serverdir, name)

        kwargs = {}
        if self.blocksize is not None:
            kwargs['blocksize'] = self.blocksize
            self.log.debug("using blocksize %d", self.blocksize)

        upload_logger = KojiUploadLogger(self.log)
        session.uploadWrapper(output.file.name, serverdir, name=name,
                              callback=upload_logger.callback, **kwargs)
        path = os.path.join(serverdir, name)
        self.log.debug("uploaded %r", path)
        return path

    @staticmethod
    def get_upload_server_dir():
        """
        Create a path name for uploading files to

        :return: str, path name expected to be unique
        """
        dir_prefix = 'koji-promote'
        random_chars = ''.join([random.choice(ascii_letters)
                                for _ in range(8)])
        unique_fragment = '%r.%s' % (time.time(), random_chars)
        return os.path.join(dir_prefix, unique_fragment)

    def login(self):
        """
        Log in to koji

        :return: koji.ClientSession instance, logged in
        """
        auth_info = {
            "proxyuser": self.koji_proxy_user,
            "ssl_certs_dir": self.koji_ssl_certs,
            "krb_principal": self.koji_principal,
            "krb_keytab": self.koji_keytab
        }
        return create_koji_session(self.kojihub, auth_info)

    def run(self):
        """
        Run the plugin.
        """

        if ((self.koji_principal and not self.koji_keytab) or
                (self.koji_keytab and not self.koji_principal)):
            raise RuntimeError("specify both koji_principal and koji_keytab "
                               "or neither")

        # Only run if the build was successful
        if self.workflow.build_process_failed:
            self.log.info("Not promoting failed build to koji")
            return

        koji_metadata, output_files = self.get_metadata()

        try:
            session = self.login()
            server_dir = self.get_upload_server_dir()
            for output in output_files:
                if output.file:
                    self.upload_file(session, output, server_dir)
        finally:
            for output in output_files:
                if output.file:
                    output.file.close()

        try:
            build_info = session.CGImport(koji_metadata, server_dir)
        except Exception:
            self.log.debug("metadata: %r", koji_metadata)
            raise

        # Older versions of CGImport do not return a value.
        build_id = build_info.get("id") if build_info else None

        self.log.debug("Build information: %s",
                       json.dumps(build_info, sort_keys=True, indent=4))

        # Tag the build
        if build_id is not None and self.target is not None:
            self.log.debug("Finding build tag for target %s", self.target)
            target_info = session.getBuildTarget(self.target)
            build_tag = target_info['dest_tag_name']
            self.log.info("Tagging build with %s", build_tag)
            task_id = session.tagBuild(build_tag, build_id)
            task = TaskWatcher(session, task_id,
                               poll_interval=self.poll_interval)
            task.wait()
            if task.failed():
                raise RuntimeError("Task %s failed to tag koji build" % task_id)

        return build_id
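
The login() method above delegates to a create_koji_session() helper. A
minimal sketch of the equivalent logic, mirroring the inline login() from the
earlier example and assuming the stock koji client API (ClientSession,
ssl_login, krb_login); the paths and hub URL are illustrative:

import os

import koji

def example_login(kojihub, ssl_certs_dir=None, proxyuser=None):
    session = koji.ClientSession(kojihub)
    kwargs = {}
    if proxyuser:
        kwargs['proxyuser'] = proxyuser

    if ssl_certs_dir:
        # Certificate-based authentication
        session.ssl_login(os.path.join(ssl_certs_dir, 'cert'),
                          os.path.join(ssl_certs_dir, 'ca'),
                          os.path.join(ssl_certs_dir, 'serverca'),
                          **kwargs)
    else:
        # Kerberos authentication using the default principal
        session.krb_login(**kwargs)

    return session

# session = example_login('https://koji.example.com/kojihub')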
    def run(self):
        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. Probably not running in build container.")
            return

        kwargs = {}
        metadata = build_json.get("metadata", {})
        if "namespace" in metadata:
            kwargs["namespace"] = metadata["namespace"]

        try:
            build_id = metadata["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)

        # Initial setup uses host-based auth: Apache is configured to accept
        # everything from a specific IP and to set a specific X-Remote-User
        # header for such requests.
        # FIXME: remove `openshift_uri` once osbs-client is released
        osbs_conf = Configuration(
            conf_file=None,
            openshift_uri=self.url,
            openshift_url=self.url,
            use_auth=self.use_auth,
            verify_ssl=self.verify_ssl,
        )
        osbs = OSBS(osbs_conf, osbs_conf)

        try:
            commit_id = self.workflow.source.commit_id
        except AttributeError:
            commit_id = ""

        labels = {
            "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
            "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),
            "logs": "\n".join(self.workflow.build_logs),
            "rpm-packages": "\n".join(self.get_post_result(PostBuildRPMqaPlugin.key)),
            "repositories": json.dumps(self.get_repositories()),
            "commit_id": commit_id,
            "base-image-id": self.workflow.base_image_inspect["Id"],
            "base-image-name": self.workflow.builder.base_image.to_str(),
            "image-id": self.workflow.builder.image_id,
            "digests": json.dumps(self.get_digests()),
        }

        tar_path = tar_size = tar_md5sum = tar_sha256sum = None
        if len(self.workflow.exported_image_sequence) > 0:
            tar_path = self.workflow.exported_image_sequence[-1].get("path")
            tar_size = self.workflow.exported_image_sequence[-1].get("size")
            tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
            tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
        # OpenShift does not appear to handle None values (null in JSON)
        if tar_size is not None and tar_md5sum is not None and \
                tar_sha256sum is not None and tar_path is not None:
            labels["tar_metadata"] = json.dumps(
                {
                    "size": tar_size,
                    "md5sum": tar_md5sum,
                    "sha256sum": tar_sha256sum,
                    "filename": os.path.basename(tar_path),
                }
            )
        osbs.set_annotations_on_build(build_id, labels, **kwargs)
        return labels
Ejemplo n.º 51
0
class ImportImagePlugin(PostBuildPlugin):
    """
    Import image tags from external docker registry into Origin,
    creating an ImageStream if one does not already exist.
    """

    key = 'import_image'
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow, imagestream, docker_image_repo,
                 url, build_json_dir, verify_ssl=True, use_auth=True,
                 insecure_registry=None):
        """
        constructor

        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param imagestream: str, name of ImageStream
        :param docker_image_repo: str, image repository to import tags from
        :param url: str, URL to OSv3 instance
        :param build_json_dir: str, path to directory with input json
        :param verify_ssl: bool, verify SSL certificate?
        :param use_auth: bool, initiate authentication with openshift?
        :param insecure_registry: bool, whether the Docker registry uses
               plain HTTP
        """
        # call parent constructor
        super(ImportImagePlugin, self).__init__(tasker, workflow)
        self.imagestream_name = imagestream
        self.docker_image_repo = docker_image_repo
        self.url = url
        self.build_json_dir = build_json_dir
        self.verify_ssl = verify_ssl
        self.use_auth = use_auth
        self.insecure_registry = insecure_registry

        self.osbs = None
        self.imagestream = None

    def run(self):
        self.setup_osbs_api()
        self.get_or_create_imagestream()
        self.process_tags()
        self.osbs.import_image(self.imagestream_name)

    def setup_osbs_api(self):
        metadata = get_build_json().get("metadata", {})
        osbs_conf = Configuration(conf_file=None,
                                  openshift_url=self.url,
                                  use_auth=self.use_auth,
                                  verify_ssl=self.verify_ssl,
                                  build_json_dir=self.build_json_dir,
                                  namespace=metadata.get('namespace', None))
        self.osbs = OSBS(osbs_conf, osbs_conf)

    def get_or_create_imagestream(self):
        try:
            self.imagestream = self.osbs.get_image_stream(self.imagestream_name)
        except OsbsResponseException:
            kwargs = {}
            if self.insecure_registry is not None:
                kwargs['insecure_registry'] = self.insecure_registry

            self.log.info('Creating ImageStream %s for %s', self.imagestream_name,
                          self.docker_image_repo)

            self.imagestream = self.osbs.create_image_stream(self.imagestream_name,
                                                             self.docker_image_repo,
                                                             **kwargs)

    def process_tags(self):
        self.log.info('Importing new tags for %s', self.imagestream_name)
        failures = False

        for tag in self.get_trackable_tags():
            try:
                self.osbs.ensure_image_stream_tag(self.imagestream.json(), tag)
                self.log.info('Imported ImageStreamTag: (%s)', tag)
            except OsbsResponseException:
                failures = True
                self.log.info('Could not import ImageStreamTag: (%s)', tag)

        if failures:
            raise RuntimeError('Failed to import ImageStreamTag(s). Check logs')

    def get_trackable_tags(self):
        primary_images = get_primary_images(self.workflow)
        if not primary_images:
            raise RuntimeError('Could not find primary images in workflow')

        tags = []
        for primary_image in primary_images:
            tag = primary_image.tag
            if '-' in tag:
                self.log.info('Skipping non-transient tag, %s', tag)
                continue
            tags.append(tag)

        return tags
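
A minimal usage sketch for the plugin above. Here tasker and workflow stand in
for the DockerTasker and DockerBuildWorkflow instances that atomic-reactor
normally supplies; every other value is illustrative:

plugin = ImportImagePlugin(tasker, workflow,
                           imagestream='example-app',
                           docker_image_repo='registry.example.com/example-app',
                           url='https://openshift.example.com/',
                           build_json_dir='inputs',
                           insecure_registry=True)
plugin.run()  # creates the ImageStream if missing, then imports trackable tags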
    def run(self):
        metadata = get_build_json().get("metadata", {})

        try:
            build_id = metadata["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)

        # Initial setup uses host-based auth: Apache is configured to accept
        # everything from a specific IP and to set a specific X-Remote-User
        # header for such requests.
        # FIXME: remove `openshift_uri` once osbs-client is released
        osbs_conf = Configuration(conf_file=None, openshift_uri=self.url, openshift_url=self.url,
                                  use_auth=self.use_auth, verify_ssl=self.verify_ssl,
                                  namespace=metadata.get('namespace', None))
        osbs = OSBS(osbs_conf, osbs_conf)

        try:
            commit_id = self.workflow.source.commit_id
        except AttributeError:
            commit_id = ""

        try:
            base_image_id = self.workflow.base_image_inspect['Id']
        except docker.errors.NotFound:
            base_image_id = ""

        annotations = {
            "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
            "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),

            # We no longer store the 'docker build' logs as an annotation
            "logs": '',

            # We no longer store the rpm packages as an annotation
            "rpm-packages": '',

            "repositories": json.dumps(self.get_repositories()),
            "commit_id": commit_id,
            "base-image-id": base_image_id,
            "base-image-name": self.workflow.builder.base_image.to_str(),
            "image-id": self.workflow.builder.image_id or '',
            "digests": json.dumps(self.get_pullspecs(self.get_digests())),
            "plugins-metadata": json.dumps(self.get_plugin_metadata())
        }

        tar_path = tar_size = tar_md5sum = tar_sha256sum = None
        if len(self.workflow.exported_image_sequence) > 0:
            tar_path = self.workflow.exported_image_sequence[-1].get("path")
            tar_size = self.workflow.exported_image_sequence[-1].get("size")
            tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
            tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
        # OpenShift does not appear to handle None values (null in JSON)
        if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
                tar_path is not None:
            annotations["tar_metadata"] = json.dumps({
                "size": tar_size,
                "md5sum": tar_md5sum,
                "sha256sum": tar_sha256sum,
                "filename": os.path.basename(tar_path),
            })
        try:
            osbs.set_annotations_on_build(build_id, annotations)
        except OsbsResponseException:
            self.log.debug("annotations: %r", annotations)
            raise

        labels = self.make_labels()
        if labels:
            try:
                osbs.update_labels_on_build(build_id, labels)
            except OsbsResponseException:
                self.log.debug("labels: %r", labels)
                raise

        return {"annotations": annotations, "labels": labels}
    def run(self):
        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. Probably not running in build container.")
            return

        kwargs = {}
        metadata = build_json.get("metadata", {})
        if 'namespace' in metadata:
            kwargs['namespace'] = metadata['namespace']

        try:
            build_id = metadata["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)

        # Initial setup uses host-based auth: Apache is configured to accept
        # everything from a specific IP and to set a specific X-Remote-User
        # header for such requests.
        osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                                  use_auth=self.use_auth, verify_ssl=self.verify_ssl)
        osbs = OSBS(osbs_conf, osbs_conf)

        # Repositories usually formed from NVR labels;
        # these should be used for pulling and layering.
        primary_repositories = []
        for registry in self.workflow.push_conf.all_registries:
            for image in self.workflow.tag_conf.primary_images:
                registry_image = image.copy()
                registry_image.registry = registry.uri
                primary_repositories.append(registry_image.to_str())

        # Unique, unpredictable repositories
        unique_repositories = []
        for registry in self.workflow.push_conf.all_registries:
            for image in self.workflow.tag_conf.unique_images:
                registry_image = image.copy()
                registry_image.registry = registry.uri
                unique_repositories.append(registry_image.to_str())

        repositories = {
            "primary": primary_repositories,
            "unique": unique_repositories,
        }
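        # Illustrative result (assumed registry and image names):
        #   {"primary": ["registry.example.com/example:1.0-2"],
        #    "unique": ["registry.example.com/example:unique-abcdef"]}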

        try:
            commit_id = self.workflow.source.lg.commit_id
        except AttributeError:
            commit_id = ""

        labels = {
            "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
            "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),
            "logs": "\n".join(self.workflow.build_logs),
            "rpm-packages": "\n".join(self.get_post_result(PostBuildRPMqaPlugin.key)),
            "repositories": json.dumps(repositories),
            "commit_id": commit_id,
        }

        tar_path = tar_size = tar_md5sum = tar_sha256sum = None
        if len(self.workflow.exported_image_sequence) > 0:
            tar_path = self.workflow.exported_image_sequence[-1].get("path")
            tar_size = self.workflow.exported_image_sequence[-1].get("size")
            tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
            tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
        # OpenShift does not appear to handle None values (null in JSON)
        if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
                tar_path is not None:
            labels["tar_metadata"] = json.dumps({
                "size": tar_size,
                "md5sum": tar_md5sum,
                "sha256sum": tar_sha256sum,
                "filename": os.path.basename(tar_path),
            })
        osbs.set_annotations_on_build(build_id, labels, **kwargs)
        return labels