Example #1
def openshift(request):
    os_inst = Openshift(OAPI_PREFIX,
                        API_VER,
                        "/oauth/authorize",
                        k8s_api_url=API_PREFIX)
    os_inst._con = Connection(request.param)
    return os_inst
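
A minimal usage sketch (not part of the original listing): pytest injects the fixture above by name, and request.param carries whatever value the test supplies through indirect parametrization.

@pytest.mark.parametrize("openshift", ["1.0.4"], indirect=True)  # the version string is a placeholder
def test_connection_is_mocked(openshift):
    # the fixture replaced the connection with one built from request.param
    assert isinstance(openshift._con, Connection)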
Example #2

    def run(self):
        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. Probably not running in build container.")
            return
        try:
            build_id = build_json["metadata"]["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)

        api_url = urljoin(self.url, "/osapi/v1beta1/")
        oauth_url = urljoin(self.url, "/oauth/authorize")  # MUST NOT END WITH SLASH

        # initial setup will use host based auth: apache will be set to accept everything
        # from specific IP and will set specific X-Remote-User for such requests
        o = Openshift(api_url, oauth_url, None, use_auth=self.use_auth, verify_ssl=self.verify_ssl)

        primary_repositories = []
        for registry_uri in self.workflow.tag_and_push_conf.registries:
            registry_conf = self.workflow.tag_and_push_conf[registry_uri]
            try:
                image_names = registry_conf['image_names']
            except KeyError:
                self.log.error("Registry '%s' doesn't have any image names, skipping...", registry_uri)
                continue
            for image in image_names:
                image_name = ImageName.parse(image)
                if image_name.registry:
                    assert image_name.registry == registry_uri
                image_name.registry = registry_uri
                primary_repositories.append(image_name.to_str())

        unique_repositories = []
        target_image = self.workflow.builder.image.copy()
        for registry in self.workflow.target_registries:
            target_image.registry = registry
            unique_repositories.append(target_image.to_str())

        repositories = {
            "primary": primary_repositories,
            "unique": unique_repositories,
        }

        labels = {
            "dockerfile": self.workflow.prebuild_results.get("dockerfile_content", ""),
            "artefacts": self.workflow.prebuild_results.get("distgit_fetch_artefacts", ""),
            "logs": "\n".join(self.workflow.build_logs),
            "rpm-packages": "\n".join(self.workflow.postbuild_results.get("all_rpm_packages", "")),
            "repositories": json.dumps(repositories),
        }
        o.set_annotations_on_build(build_id, labels)
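
For illustration only (the values are made up): the plugin above expects the BUILD environment variable to hold a JSON-serialized OpenShift Build object and reads nothing from it beyond metadata.name.

import json
import os

os.environ["BUILD"] = json.dumps({"metadata": {"name": "example-build-1"}})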
Example #3
 def test_use_service_account_token(self, kwargs, called):
     openshift_mock = flexmock(Openshift).should_receive('can_use_serviceaccount_token')
     if called:
         openshift_mock.once()
     else:
         openshift_mock.never()
     Openshift(APIS_PREFIX, "/oauth/authorize", **kwargs)
Example #4
File: api.py    Project: mmilata/osbs
 def __init__(self, openshift_configuration, build_configuration):
     """ """
     self.os_conf = openshift_configuration
     self.build_conf = build_configuration
     self.os = Openshift(openshift_api_url=self.os_conf.get_openshift_api_uri(),
                         openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
                         kubelet_base=self.os_conf.get_kubelet_uri(),
                         verbose=self.os_conf.get_verbosity(),
                         username=self.os_conf.get_username(),
                         password=self.os_conf.get_password(),
                         verify_ssl=self.os_conf.get_verify_ssl())
     self._bm = None
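
A hedged construction sketch (the file path and section name are assumptions): both constructor arguments are typically osbs.conf.Configuration objects read from the same configuration file.

from osbs.api import OSBS
from osbs.conf import Configuration

os_conf = Configuration(conf_file="/etc/osbs.conf", conf_section="default")
build_conf = Configuration(conf_file="/etc/osbs.conf", conf_section="default")
osbs_api = OSBS(os_conf, build_conf)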
Example #5
    def test_stream_logs_not_decoded(self, caplog):
        server = Openshift('http://oapi/v1/',
                           'v1',
                           'http://oauth/authorize',
                           k8s_api_url='http://api/v1/')

        logs = (
            u'Lógs'.encode('utf-8'),
            u'Lðgs'.encode('utf-8'),
        )

        fake_response = flexmock(status_code=httplib.OK, headers={})

        (fake_response.should_receive('iter_lines').and_yield(*logs).with_args(
            decode_unicode=False))

        (flexmock(requests).should_receive('request').and_return(fake_response)
         )

        with caplog.atLevel(logging.ERROR):
            for result in server.stream_logs('anything'):
                assert isinstance(result, six.binary_type)
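
The with_args(decode_unicode=False) expectation pins down the behaviour the test relies on: in that mode requests yields raw bytes from iter_lines(). A standalone sketch (the URL is a placeholder):

import requests

resp = requests.get("http://localhost:8080/logs", stream=True)
for line in resp.iter_lines(decode_unicode=False):
    assert isinstance(line, bytes)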
Example #6
 def __init__(self, openshift_configuration, build_configuration):
     """ """
     self.os_conf = openshift_configuration
     self.build_conf = build_configuration
     self.os = Openshift(openshift_api_url=self.os_conf.get_openshift_api_uri(),
                         openshift_api_version=self.os_conf.get_openshift_api_version(),
                         openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
                         k8s_api_url=self.os_conf.get_k8s_api_uri(),
                         verbose=self.os_conf.get_verbosity(),
                         username=self.os_conf.get_username(),
                         password=self.os_conf.get_password(),
                         use_kerberos=self.os_conf.get_use_kerberos(),
                         client_cert=self.os_conf.get_client_cert(),
                         client_key=self.os_conf.get_client_key(),
                         kerberos_keytab=self.os_conf.get_kerberos_keytab(),
                         kerberos_principal=self.os_conf.get_kerberos_principal(),
                         kerberos_ccache=self.os_conf.get_kerberos_ccache(),
                         use_auth=self.os_conf.get_use_auth(),
                         verify_ssl=self.os_conf.get_verify_ssl(),
                         token=self.os_conf.get_oauth2_token(),
                         namespace=self.os_conf.get_namespace())
     self._bm = None
Example #7
 def __init__(self, openshift_configuration, build_configuration):
     """ """
     self.os_conf = openshift_configuration
     self.build_conf = build_configuration
     self.os = Openshift(openshift_api_url=self.os_conf.get_openshift_api_uri(),
                         openshift_api_version=self.os_conf.get_openshift_api_version(),
                         openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
                         k8s_api_url=self.os_conf.get_k8s_api_uri(),
                         verbose=self.os_conf.get_verbosity(),
                         username=self.os_conf.get_username(),
                         password=self.os_conf.get_password(),
                         use_kerberos=self.os_conf.get_use_kerberos(),
                         client_cert=self.os_conf.get_client_cert(),
                         client_key=self.os_conf.get_client_key(),
                         kerberos_keytab=self.os_conf.get_kerberos_keytab(),
                         kerberos_principal=self.os_conf.get_kerberos_principal(),
                         kerberos_ccache=self.os_conf.get_kerberos_ccache(),
                         use_auth=self.os_conf.get_use_auth(),
                         verify_ssl=self.os_conf.get_verify_ssl())
     self._bm = None
Example #8
class OSBS(object):
    """
    Note: all API methods return osbs.http.Response object. This is, due to historical
    reasons, untrue for list_builds and get_user, which return list of BuildResponse objects
    and dict respectively.
    """

    _GIT_LABEL_KEYS = ('git-repo-name', 'git-branch')

    @osbsapi
    def __init__(self, openshift_configuration, build_configuration):
        """ """
        self.os_conf = openshift_configuration
        self.build_conf = build_configuration
        self.os = Openshift(openshift_api_url=self.os_conf.get_openshift_api_uri(),
                            openshift_api_version=self.os_conf.get_openshift_api_version(),
                            openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
                            k8s_api_url=self.os_conf.get_k8s_api_uri(),
                            verbose=self.os_conf.get_verbosity(),
                            username=self.os_conf.get_username(),
                            password=self.os_conf.get_password(),
                            use_kerberos=self.os_conf.get_use_kerberos(),
                            client_cert=self.os_conf.get_client_cert(),
                            client_key=self.os_conf.get_client_key(),
                            kerberos_keytab=self.os_conf.get_kerberos_keytab(),
                            kerberos_principal=self.os_conf.get_kerberos_principal(),
                            kerberos_ccache=self.os_conf.get_kerberos_ccache(),
                            use_auth=self.os_conf.get_use_auth(),
                            verify_ssl=self.os_conf.get_verify_ssl(),
                            token=self.os_conf.get_oauth2_token(),
                            namespace=self.os_conf.get_namespace())
        self._bm = None

    @osbsapi
    def list_builds(self, field_selector=None, koji_task_id=None, running=None,
                    labels=None):
        """
        List builds with matching fields

        :param field_selector: str, field selector for Builds
        :param koji_task_id: str, only list builds for Koji Task ID
        :return: BuildResponse list
        """

        if running:
            running_fs = ",".join(["status!={status}".format(status=status.capitalize())
                                  for status in BUILD_FINISHED_STATES])
            if not field_selector:
                field_selector = running_fs
            else:
                field_selector = ','.join([field_selector, running_fs])
        response = self.os.list_builds(field_selector=field_selector,
                                       koji_task_id=koji_task_id, labels=labels)
        serialized_response = response.json()
        build_list = []
        for build in serialized_response["items"]:
            build_list.append(BuildResponse(build))

        return build_list

    def watch_builds(self, field_selector=None):
        kwargs = {}
        if field_selector is not None:
            kwargs['fieldSelector'] = field_selector

        for changetype, obj in self.os.watch_resource("builds", **kwargs):
            yield changetype, obj

    @osbsapi
    def get_build(self, build_id):
        response = self.os.get_build(build_id)
        build_response = BuildResponse(response.json())
        return build_response

    @osbsapi
    def cancel_build(self, build_id):
        response = self.os.cancel_build(build_id)
        build_response = BuildResponse(response.json())
        return build_response

    @osbsapi
    def get_pod_for_build(self, build_id):
        """
        :return: PodResponse object for pod relating to the build
        """
        pods = self.os.list_pods(label='openshift.io/build.name=%s' % build_id)
        serialized_response = pods.json()
        pod_list = [PodResponse(pod) for pod in serialized_response["items"]]
        if not pod_list:
            raise OsbsException("No pod for build")
        elif len(pod_list) != 1:
            raise OsbsException("Only one pod expected but %d returned",
                                len(pod_list))
        return pod_list[0]

    @osbsapi
    def get_build_request(self, build_type=None, inner_template=None,
                          outer_template=None, customize_conf=None):
        """
        return instance of BuildRequest

        :param build_type: str, unused
        :param inner_template: str, name of inner template for BuildRequest
        :param outer_template: str, name of outer template for BuildRequest
        :param customize_conf: str, name of customization config for BuildRequest
        :return: instance of BuildRequest
        """
        if build_type is not None:
            warnings.warn("build types are deprecated, do not use the build_type argument")

        build_request = BuildRequest(
            build_json_store=self.os_conf.get_build_json_store(),
            inner_template=inner_template,
            outer_template=outer_template,
            customize_conf=customize_conf)

        # Apply configured resource limits.
        cpu_limit = self.build_conf.get_cpu_limit()
        memory_limit = self.build_conf.get_memory_limit()
        storage_limit = self.build_conf.get_storage_limit()
        if (cpu_limit is not None or
                memory_limit is not None or
                storage_limit is not None):
            build_request.set_resource_limits(cpu=cpu_limit,
                                              memory=memory_limit,
                                              storage=storage_limit)

        return build_request

    @osbsapi
    def create_build_from_buildrequest(self, build_request):
        """
        render provided build_request and submit build from it

        :param build_request: instance of build.build_request.BuildRequest
        :return: instance of build.build_response.BuildResponse
        """
        build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
        build = build_request.render()
        response = self.os.create_build(json.dumps(build))
        build_response = BuildResponse(response.json())
        return build_response

    def _get_running_builds_for_build_config(self, build_config_id):
        all_builds_for_bc = self.os.list_builds(build_config_id=build_config_id).json()['items']
        running = []
        for b in all_builds_for_bc:
            br = BuildResponse(b)
            if br.is_pending() or br.is_running():
                running.append(br)
        return running

    def _panic_msg_for_more_running_builds(self, build_config_name, builds):
        # this should never happen, but if it does, we want to know all the builds
        #  that were running at the time
        builds = ', '.join(['%s: %s' % (b.get_build_name(), b.status) for b in builds])
        msg = "Multiple builds for %s running, can't proceed: %s" % \
            (build_config_name, builds)
        return msg

    def _verify_labels_match(self, new_build_config, existing_build_config):
        new_labels = new_build_config['metadata']['labels']
        existing_labels = existing_build_config['metadata']['labels']

        for key in self._GIT_LABEL_KEYS:
            new_label_value = new_labels.get(key)
            existing_label_value = existing_labels.get(key)

            if (existing_label_value and existing_label_value != new_label_value):
                msg = (
                    'Git labels collide with existing build config "%s". '
                    'Existing labels: %r, '
                    'New labels: %r ') % (
                       existing_build_config['metadata']['name'],
                       existing_labels,
                       new_labels)
                raise OsbsValidationException(msg)

    def _get_existing_build_config(self, build_config):
        """
        Uses the given build config to find an existing matching build config.
        Build configs are a match if:
        - metadata.name are equal
        OR
        - metadata.labels.git-repo-name AND metadata.labels.git-branch are equal
        """

        git_labels = [(key, build_config['metadata']['labels'][key])
                      for key in self._GIT_LABEL_KEYS]
        name = build_config['metadata']['name']

        queries = (
            (self.os.get_build_config_by_labels, git_labels),
            (self.os.get_build_config, name),
        )

        existing_bc = None
        for func, arg in queries:
            try:
                existing_bc = func(arg)
                # build config found
                break
            except OsbsException as exc:
                # doesn't exist
                logger.info('Build config NOT found via %s: %s',
                            func.__name__, str(exc))
                continue

        return existing_bc

    def _verify_no_running_builds(self, build_config_name):
        running_builds = self._get_running_builds_for_build_config(build_config_name)
        rb_len = len(running_builds)

        if rb_len > 0:
            if rb_len == 1:
                rb = running_builds[0]
                msg = "Build %s for %s in state %s, can't proceed." % \
                    (rb.get_build_name(), build_config_name, rb.status)
            else:
                msg = self._panic_msg_for_more_running_builds(build_config_name, running_builds)
            raise OsbsException(msg)

    def _create_scratch_build(self, build_request):
        return self._create_build_directly(build_request)

    def _create_isolated_build(self, build_request):
        return self._create_build_directly(build_request,
                                           unique=('git-repo-name', 'git-branch',
                                                   'isolated', 'isolated-release'))

    def _create_build_directly(self, build_request, unique=None):
        logger.debug(build_request)
        build_json = build_request.render()
        build_json['kind'] = 'Build'
        build_json['spec']['serviceAccount'] = 'builder'

        builder_img = build_json['spec']['strategy']['customStrategy']['from']
        kind = builder_img['kind']
        if kind == 'ImageStreamTag':
            # Only BuildConfigs get to specify an ImageStreamTag. When
            # creating Builds directly we need to specify a
            # DockerImage.
            response = self.get_image_stream_tag(builder_img['name'])
            ref = response.json()['image']['dockerImageReference']
            builder_img['kind'] = 'DockerImage'
            builder_img['name'] = ref

        if unique:
            unique_labels = {}
            for u in unique:
                unique_labels[u] = build_json['metadata']['labels'][u]
            running_builds = self.list_builds(running=True, labels=unique_labels)
            if running_builds:
                raise RuntimeError('Matching build(s) already running: {0}'
                                   .format(', '.join(x.get_build_name() for x in running_builds)))

        return BuildResponse(self.os.create_build(build_json).json())

    def _get_image_stream_info_for_build_request(self, build_request):
        """Return ImageStream, and ImageStreamTag name for base_image of build_request

        If build_request is not auto instantiated, objects are not fetched
        and None, None is returned.
        """
        image_stream = None
        image_stream_tag_name = None

        if build_request.has_ist_trigger():
            image_stream_tag_id = build_request.spec.trigger_imagestreamtag.value
            image_stream_id, image_stream_tag_name = image_stream_tag_id.split(':')

            try:
                image_stream = self.get_image_stream(image_stream_id).json()
            except OsbsResponseException as x:
                if x.status_code != 404:
                    raise

            if image_stream:
                try:
                    self.get_image_stream_tag(image_stream_tag_id).json()
                except OsbsResponseException as x:
                    if x.status_code != 404:
                        raise

        return image_stream, image_stream_tag_name

    @retry_on_conflict
    def _update_build_config_when_exist(self, build_json):
        existing_bc = self._get_existing_build_config(build_json)
        self._verify_labels_match(build_json, existing_bc)
        # Existing build config may have a different name if matched by
        # git-repo-name and git-branch labels. Continue using existing
        # build config name.
        build_config_name = existing_bc['metadata']['name']
        logger.debug('existing build config name to be used "%s"',
                     build_config_name)
        self._verify_no_running_builds(build_config_name)

        # Remove nodeSelector, will be set from build_json for worker build
        old_nodeselector = existing_bc['spec'].pop('nodeSelector', None)
        logger.debug("removing build config's nodeSelector %s", old_nodeselector)

        # Remove koji_task_id
        koji_task_id = utils.graceful_chain_get(existing_bc, 'metadata', 'labels',
                                                'koji-task-id')
        if koji_task_id is not None:
            logger.debug("removing koji-task-id %r", koji_task_id)
            utils.graceful_chain_del(existing_bc, 'metadata', 'labels', 'koji-task-id')

        utils.buildconfig_update(existing_bc, build_json)
        # Reset name change that may have occurred during
        # update above, since renaming is not supported.
        existing_bc['metadata']['name'] = build_config_name
        logger.debug('build config for %s already exists, updating...',
                     build_config_name)

        self.os.update_build_config(build_config_name, json.dumps(existing_bc))
        return existing_bc

    @retry_on_conflict
    def _update_build_config_with_triggers(self, build_json, triggers):
        existing_bc = self._get_existing_build_config(build_json)
        existing_bc['spec']['triggers'] = triggers
        build_config_name = existing_bc['metadata']['name']
        self.os.update_build_config(build_config_name, json.dumps(existing_bc))
        return existing_bc

    def _create_build_config_and_build(self, build_request):
        build_json = build_request.render()
        api_version = build_json['apiVersion']
        if api_version != self.os_conf.get_openshift_api_version():
            raise OsbsValidationException('BuildConfig template has incorrect apiVersion (%s)' %
                                          api_version)

        build_config_name = build_json['metadata']['name']
        logger.debug('build config to be named "%s"', build_config_name)
        existing_bc = self._get_existing_build_config(build_json)

        image_stream, image_stream_tag_name = \
            self._get_image_stream_info_for_build_request(build_request)

        # Remove triggers in BuildConfig to avoid accidental
        # auto instance of Build. If defined, triggers will
        # be added to BuildConfig after ImageStreamTag object
        # is properly configured.
        triggers = build_json['spec'].pop('triggers', None)

        if existing_bc:
            build_config_name = existing_bc['metadata']['name']
            existing_bc = self._update_build_config_when_exist(build_json)

        else:
            logger.debug("build config for %s doesn't exist, creating...",
                         build_config_name)
            existing_bc = self.os.create_build_config(json.dumps(build_json)).json()

        if image_stream:
            changed_ist = self.ensure_image_stream_tag(image_stream,
                                                       image_stream_tag_name,
                                                       scheduled=True)
            logger.debug('Changed parent ImageStreamTag? %s', changed_ist)

        if triggers:
            existing_bc = self._update_build_config_with_triggers(build_json, triggers)

        if image_stream and triggers:
            prev_version = existing_bc['status']['lastVersion']
            build_id = self.os.wait_for_new_build_config_instance(
                build_config_name, prev_version)
            build = BuildResponse(self.os.get_build(build_id).json())
        else:
            response = self.os.start_build(build_config_name)
            build = BuildResponse(response.json())

        return build

    def _check_labels(self, repo_info):
        df_parser = repo_info.dockerfile_parser
        labels = utils.Labels(df_parser.labels)

        required_missing = False
        req_labels = {}
        # version label isn't used here, but is required label in Dockerfile
        # and is used and required for atomic reactor
        # if we don't catch error here, it will fail in atomic reactor later
        for label in [utils.Labels.LABEL_TYPE_NAME,
                      utils.Labels.LABEL_TYPE_COMPONENT,
                      utils.Labels.LABEL_TYPE_VERSION]:
            try:
                _, req_labels[label] = labels.get_name_and_value(label)
            except KeyError:
                required_missing = True
                logger.error("required label missing from Dockerfile : %s",
                             labels.get_name(label))

        if required_missing:
            raise OsbsValidationException("required label missing from Dockerfile")

        # Verify the name label meets requirements.
        # It is made up of slash-separated name components.
        #
        # When pulling an image, the first component of the name
        # pulled is interpreted as a registry name if it contains a
        # '.' character, and otherwise the configured registries are
        # queried in turn.
        #
        # Due to this, a name with '.' in its initial component will
        # be awkward to pull from a registry because the registry name
        # will have to be explicitly supplied, e.g. "docker pull
        # foo.bar/baz" will fail because the "foo.bar" registry cannot
        # be contacted.
        #
        # Avoid this awkwardness by forbidding '.' in the initial
        # component of the image name.
        name_components = req_labels[utils.Labels.LABEL_TYPE_NAME].split('/', 1)
        if '.' in name_components[0]:
            raise OsbsValidationException("initial image name component "
                                          "must not contain '.'")

        return req_labels, df_parser.baseimage

    def _get_flatpak_labels(self, module):
        module_name, module_stream, _ = utils.split_module_spec(module)

        return {
            utils.Labels.LABEL_TYPE_NAME: module_name,
            utils.Labels.LABEL_TYPE_COMPONENT: module_name,
            utils.Labels.LABEL_TYPE_VERSION: module_stream
        }, self.build_conf.get_flatpak_base_image()

    def _do_create_prod_build(self, git_uri, git_ref,
                              git_branch,
                              user,
                              component=None,
                              target=None,
                              architecture=None, yum_repourls=None,
                              koji_task_id=None,
                              scratch=None,
                              platform=None,
                              platforms=None,
                              build_type=None,
                              release=None,
                              inner_template=None,
                              outer_template=None,
                              customize_conf=None,
                              arrangement_version=None,
                              filesystem_koji_task_id=None,
                              koji_upload_dir=None,
                              is_auto=False,
                              koji_parent_build=None,
                              isolated=None,
                              flatpak=False,
                              module=None,
                              module_compose_id=None,
                              signing_intent=None,
                              compose_ids=None,
                              **kwargs):

        if flatpak:
            if module is None:
                raise ValueError("Flatpak build missing required parameter 'module'")
            if isolated:
                # Flatpak builds from a particular stream autogenerate the release
                # as <module_version>.<n>; it doesn't make sense to make a fix
                # from specific one of these autogenerated version. What an isolated
                # fix for module requires will have to be determined from experience.
                raise ValueError("Flatpak build cannot be isolated")

        repo_info = utils.get_repo_info(git_uri, git_ref, git_branch=git_branch)
        build_request = self.get_build_request(inner_template=inner_template,
                                               outer_template=outer_template,
                                               customize_conf=customize_conf)

        if flatpak:
            req_labels, base_image = self._get_flatpak_labels(module)
        else:
            req_labels, base_image = self._check_labels(repo_info)

        if not git_branch:
            raise OsbsValidationException("required argument 'git_branch' can't be None")

        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            git_branch=git_branch,
            user=user,
            component=req_labels[utils.Labels.LABEL_TYPE_COMPONENT],
            build_image=self.build_conf.get_build_image(),
            build_imagestream=self.build_conf.get_build_imagestream(),
            base_image=base_image,
            name_label=req_labels[utils.Labels.LABEL_TYPE_NAME],
            registry_uris=self.build_conf.get_registry_uris(),
            registry_secrets=self.build_conf.get_registry_secrets(),
            source_registry_uri=self.build_conf.get_source_registry_uri(),
            registry_api_versions=self.build_conf.get_registry_api_versions(platform),
            openshift_uri=self.os_conf.get_openshift_base_uri(),
            builder_openshift_url=self.os_conf.get_builder_openshift_url(),
            kojiroot=self.build_conf.get_kojiroot(),
            kojihub=self.build_conf.get_kojihub(),
            sources_command=self.build_conf.get_sources_command(),
            koji_target=target,
            koji_certs_secret=self.build_conf.get_koji_certs_secret(),
            koji_task_id=koji_task_id,
            koji_use_kerberos=self.build_conf.get_koji_use_kerberos(),
            koji_kerberos_keytab=self.build_conf.get_koji_kerberos_keytab(),
            koji_kerberos_principal=self.build_conf.get_koji_kerberos_principal(),
            flatpak=flatpak,
            module=module,
            module_compose_id=module_compose_id,
            flatpak_base_image=self.build_conf.get_flatpak_base_image(),
            odcs_url=self.build_conf.get_odcs_url(),
            odcs_insecure=self.build_conf.get_odcs_insecure(),
            odcs_openidc_secret=self.build_conf.get_odcs_openidc_secret(),
            odcs_ssl_secret=self.build_conf.get_odcs_ssl_secret(),
            pdc_url=self.build_conf.get_pdc_url(),
            pdc_insecure=self.build_conf.get_pdc_insecure(),
            architecture=architecture,
            platforms=platforms,
            platform=platform,
            build_type=build_type,
            release=release,
            vendor=self.build_conf.get_vendor(),
            build_host=self.build_conf.get_build_host(),
            authoritative_registry=self.build_conf.get_authoritative_registry(),
            distribution_scope=self.build_conf.get_distribution_scope(),
            yum_repourls=yum_repourls,
            proxy=self.build_conf.get_proxy(),
            pulp_secret=self.build_conf.get_pulp_secret(),
            smtp_host=self.build_conf.get_smtp_host(),
            smtp_from=self.build_conf.get_smtp_from(),
            smtp_additional_addresses=self.build_conf.get_smtp_additional_addresses(),
            smtp_error_addresses=self.build_conf.get_smtp_error_addresses(),
            smtp_email_domain=self.build_conf.get_smtp_email_domain(),
            smtp_to_submitter=self.build_conf.get_smtp_to_submitter(),
            smtp_to_pkgowner=self.build_conf.get_smtp_to_pkgowner(),
            use_auth=self.build_conf.get_builder_use_auth(),
            pulp_registry=self.os_conf.get_pulp_registry(),
            builder_build_json_dir=self.build_conf.get_builder_build_json_store(),
            scratch=self.build_conf.get_scratch(scratch),
            reactor_config_secret=self.build_conf.get_reactor_config_secret(),
            client_config_secret=self.build_conf.get_client_config_secret(),
            token_secrets=self.build_conf.get_token_secrets(),
            arrangement_version=arrangement_version,
            info_url_format=self.build_conf.get_info_url_format(),
            artifacts_allowed_domains=self.build_conf.get_artifacts_allowed_domains(),
            equal_labels=self.build_conf.get_equal_labels(),
            platform_node_selector=self.build_conf.get_platform_node_selector(platform),
            scratch_build_node_selector=self.build_conf.get_scratch_build_node_selector(),
            explicit_build_node_selector=self.build_conf.get_explicit_build_node_selector(),
            isolated_build_node_selector=self.build_conf.get_isolated_build_node_selector(),
            auto_build_node_selector=self.build_conf.get_auto_build_node_selector(),
            is_auto=is_auto,
            filesystem_koji_task_id=filesystem_koji_task_id,
            koji_upload_dir=koji_upload_dir,
            platform_descriptors=self.build_conf.get_platform_descriptors(),
            koji_parent_build=koji_parent_build,
            group_manifests=self.os_conf.get_group_manifests(),
            isolated=isolated,
            prefer_schema1_digest=self.build_conf.get_prefer_schema1_digest(),
            signing_intent=signing_intent,
            compose_ids=compose_ids
        )
        build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
        build_request.set_repo_info(repo_info)
        if build_request.scratch:
            response = self._create_scratch_build(build_request)
        elif build_request.isolated:
            response = self._create_isolated_build(build_request)
        else:
            response = self._create_build_config_and_build(build_request)
        logger.debug(response.json)
        return response

    @osbsapi
    def create_prod_build(self, *args, **kwargs):
        """
        Create a production build

        :param git_uri: str, URI of git repository
        :param git_ref: str, reference to commit
        :param git_branch: str, branch name
        :param user: str, user name
        :param component: str, not used anymore
        :param target: str, koji target
        :param architecture: str, build architecture
        :param yum_repourls: list, URLs for yum repos
        :param koji_task_id: int, koji task ID requesting build
        :param scratch: bool, this is a scratch build
        :param platform: str, the platform name
        :param platforms: list<str>, the name of each platform
        :param release: str, the release value to use
        :param inner_template: str, name of inner template for BuildRequest
        :param outer_template: str, name of outer template for BuildRequest
        :param customize_conf: str, name of customization config for BuildRequest
        :param arrangement_version: int, numbered arrangement of plugins for orchestration workflow
        :param signing_intent: str, signing intent of the ODCS composes
        :param compose_ids: list<int>, ODCS composes used
        :return: BuildResponse instance
        """
        warnings.warn("prod (all-in-one) builds are deprecated, "
                      "please use create_orchestrator_build")
        return self._do_create_prod_build(*args, **kwargs)

    @osbsapi
    def create_build(self, **kwargs):
        """
        take input args, create build request and submit the build

        :param kwargs: keyword args for build
        :return: instance of BuildRequest
        """
        return self._do_create_prod_build(**kwargs)

    @osbsapi
    def create_worker_build(self, **kwargs):
        """
        Create a worker build

        Pass through method to create_prod_build with the following
        modifications:
            - platform param is required
            - release param is required
            - arrangement_version param is required, which is used to
              select which worker_inner:n.json template to use
            - inner template set to worker_inner:n.json if not set
            - outer template set to worker.json if not set
            - customize configuration set to worker_customize.json if not set

        :return: BuildResponse instance
        """
        missing = set()
        for required in ('platform', 'release', 'arrangement_version'):
            if not kwargs.get(required):
                missing.add(required)

        if missing:
            raise ValueError("Worker build missing required parameters: %s" %
                             missing)

        if kwargs.get('platforms'):
            raise ValueError("Worker build called with unwanted platforms param")

        arrangement_version = kwargs['arrangement_version']
        kwargs.setdefault('inner_template', WORKER_INNER_TEMPLATE.format(
            arrangement_version=arrangement_version))
        kwargs.setdefault('outer_template', WORKER_OUTER_TEMPLATE)
        kwargs.setdefault('customize_conf', WORKER_CUSTOMIZE_CONF)
        kwargs['build_type'] = BUILD_TYPE_WORKER
        try:
            return self._do_create_prod_build(**kwargs)
        except IOError as ex:
            if os.path.basename(ex.filename) == kwargs['inner_template']:
                raise OsbsValidationException("worker invalid arrangement_version %s" %
                                              arrangement_version)

            raise

    @osbsapi
    def create_orchestrator_build(self, **kwargs):
        """
        Create an orchestrator build

        Pass through method to create_prod_build with the following
        modifications:
            - platforms param is required
            - arrangement_version param may be used to select which
              orchestrator_inner:n.json template to use
            - inner template set to orchestrator_inner:n.json if not set
            - outer template set to orchestrator.json if not set
            - customize configuration set to orchestrator_customize.json if not set

        :return: BuildResponse instance
        """
        if not kwargs.get('platforms'):
            raise ValueError('Orchestrator build requires platforms param')

        if not self.can_orchestrate():
            raise OsbsOrchestratorNotEnabled("can't create orchestrate build "
                                             "when can_orchestrate isn't enabled")
        extra = [x for x in ('platform',) if kwargs.get(x)]
        if extra:
            raise ValueError("Orchestrator build called with unwanted parameters: %s" %
                             extra)

        arrangement_version = kwargs.setdefault('arrangement_version',
                                                self.build_conf.get_arrangement_version())

        kwargs.setdefault('inner_template', ORCHESTRATOR_INNER_TEMPLATE.format(
            arrangement_version=arrangement_version))
        kwargs.setdefault('outer_template', ORCHESTRATOR_OUTER_TEMPLATE)
        kwargs.setdefault('customize_conf', ORCHESTRATOR_CUSTOMIZE_CONF)
        kwargs['build_type'] = BUILD_TYPE_ORCHESTRATOR
        try:
            return self._do_create_prod_build(**kwargs)
        except IOError as ex:
            if os.path.basename(ex.filename) == kwargs['inner_template']:
                raise OsbsValidationException("orchestrator invalid arrangement_version %s" %
                                              arrangement_version)

            raise

    def _decode_build_logs_generator(self, logs):
        for line in logs:
            line = line.decode("utf-8").rstrip()
            yield line

    @osbsapi
    def get_build_logs(self, build_id, follow=False, build_json=None, wait_if_missing=False,
                       decode=False):
        """
        provide logs from build

        NOTE: Since atomic-reactor 1.6.25, logs are always in UTF-8, so if
        asked to decode, we assume that is the encoding in use. Otherwise, we
        return the bytes exactly as they came from the container.

        :param build_id: str
        :param follow: bool, fetch logs as they come?
        :param build_json: dict, to save one get-build query
        :param wait_if_missing: bool, if build doesn't exist, wait
        :param decode: bool, whether or not to decode logs as utf-8
        :return: None, bytes, or iterable of bytes
        """
        logs = self.os.logs(build_id, follow=follow, build_json=build_json,
                            wait_if_missing=wait_if_missing)

        if decode and isinstance(logs, GeneratorType):
            return self._decode_build_logs_generator(logs)

        # str or None returned from self.os.logs()
        if decode and logs is not None:
            logs = logs.decode("utf-8").rstrip()

        return logs

    @staticmethod
    def _parse_build_log_entry(entry):
        items = entry.split()
        if len(items) < 4:
            # This is not a valid build log entry
            return (None, entry)

        platform = items[2]
        if not platform.startswith("platform:"):
            # Line logged without using the appropriate LoggerAdapter
            return (None, entry)

        platform = platform.split(":", 1)[1]
        if platform == "-":
            return (None, entry)  # proper orchestrator build log entry

        # Anything else should be a worker build log entry, so we strip off
        # the leading 8 wrapping orchestrator log fields:
        # <date> <time> <platform> - <name> - <level> -
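        # Worked example (hypothetical entry, not taken from real output):
        #   "2018-01-01 12:00:00 platform:x86_64 - osbs.http - INFO - build running"
        # is returned as ("x86_64", "build running").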
        plen = sum(len(items[i]) + 1  # include trailing space
                   for i in range(8))
        line = entry[plen:]
        # if the 3rd field is "platform:-", we strip it out
        items = line.split()
        if len(items) > 2 and items[2] == "platform:-":
            plen = sum(len(items[i]) + 1  # include trailing space
                       for i in range(3))
            line = "%s %s %s" % (items[0], items[1], line[plen:])
        return (platform, line)

    @osbsapi
    def get_orchestrator_build_logs(self, build_id, follow=False, wait_if_missing=False):
        """
        provide logs from orchestrator build

        :param build_id: str
        :param follow: bool, fetch logs as they come?
        :param wait_if_missing: bool, if build doesn't exist, wait
        :return: generator yielding objects with attributes 'platform' and 'line'
        """
        logs = self.get_build_logs(build_id=build_id, follow=follow,
                                   wait_if_missing=wait_if_missing, decode=True)

        if logs is None:
            return
        if isinstance(logs, GeneratorType):
            for entries in logs:
                for entry in entries.splitlines():
                    yield LogEntry(*self._parse_build_log_entry(entry))
        else:
            for entry in logs.splitlines():
                yield LogEntry(*self._parse_build_log_entry(entry))

    @osbsapi
    def get_docker_build_logs(self, build_id, decode_logs=True, build_json=None):
        """
        get logs provided by "docker build"

        :param build_id: str
        :param decode_logs: bool, docker by default output logs in simple json structure:
            { "stream": "line" }
            if this arg is set to True, it decodes logs to human readable form
        :param build_json: dict, to save one get-build query
        :return: str
        """
        if not build_json:
            build = self.os.get_build(build_id)
            build_response = BuildResponse(build.json())
        else:
            build_response = BuildResponse(build_json)

        if build_response.is_finished():
            logs = build_response.get_logs(decode_logs=decode_logs)
            return logs
        logger.warning("build haven't finished yet")

    @osbsapi
    def wait_for_build_to_finish(self, build_id):
        response = self.os.wait_for_build_to_finish(build_id)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def wait_for_build_to_get_scheduled(self, build_id):
        response = self.os.wait_for_build_to_get_scheduled(build_id)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def update_labels_on_build(self, build_id, labels):
        response = self.os.update_labels_on_build(build_id, labels)
        return response

    @osbsapi
    def set_labels_on_build(self, build_id, labels):
        response = self.os.set_labels_on_build(build_id, labels)
        return response

    @osbsapi
    def update_labels_on_build_config(self, build_config_id, labels):
        response = self.os.update_labels_on_build_config(build_config_id, labels)
        return response

    @osbsapi
    def set_labels_on_build_config(self, build_config_id, labels):
        response = self.os.set_labels_on_build_config(build_config_id, labels)
        return response

    @osbsapi
    def update_annotations_on_build(self, build_id, annotations):
        return self.os.update_annotations_on_build(build_id, annotations)

    @osbsapi
    def set_annotations_on_build(self, build_id, annotations):
        return self.os.set_annotations_on_build(build_id, annotations)

    @osbsapi
    def import_image(self, name):
        """
        Import image tags from a Docker registry into an ImageStream

        :return: bool, whether new tags were imported
        """

        return self.os.import_image(name)

    @osbsapi
    def get_token(self):
        if self.os.use_kerberos:
            return self.os.get_oauth_token()
        else:
            if self.os.token:
                return self.os.token

            raise OsbsValidationException("no token stored for %s" % self.os_conf.conf_section)

    @osbsapi
    def login(self, token=None, username=None, password=None):
        if self.os.use_kerberos:
            raise OsbsValidationException("can't use login when using kerberos")

        if not token:
            if username:
                self.os.username = username
            else:
                try:
                    self.os.username = raw_input("Username: ")
                except NameError:
                    self.os.username = input("Username: ")

            if password:
                self.os.password = password
            else:
                self.os.password = getpass.getpass()
            self.os.use_auth = True
            token = self.os.get_oauth_token()

        self.os.token = token
        try:
            self.os.get_user()
        except OsbsResponseException as ex:
            if ex.status_code == http_client.UNAUTHORIZED:
                raise OsbsValidationException("token is not valid")
            raise

        token_file = utils.get_instance_token_file_name(self.os_conf.conf_section)
        token_file_dir = os.path.dirname(token_file)

        if not os.path.exists(token_file_dir):
            os.makedirs(token_file_dir)

        # Inspired by http://stackoverflow.com/a/15015748/5998718
        # For security, remove file with potentially elevated mode
        if os.path.exists(token_file):
            os.remove(token_file)

        # Open file descriptor
        fdesc = os.open(token_file,
                        os.O_WRONLY | os.O_CREAT | os.O_EXCL,
                        stat.S_IRUSR | stat.S_IWUSR)

        with os.fdopen(fdesc, 'w') as f:
            f.write(token + '\n')

    @osbsapi
    def get_user(self, username="~"):
        return self.os.get_user(username).json()

    @osbsapi
    def get_serviceaccount_tokens(self, username="~"):
        return self.os.get_serviceaccount_tokens(username)

    @osbsapi
    def get_image_stream_tag(self, tag_id):
        return self.os.get_image_stream_tag(tag_id)

    @osbsapi
    def ensure_image_stream_tag(self, stream, tag_name, scheduled=False):
        """Ensures the tag is monitored in ImageStream

        :param stream: dict, ImageStream object
        :param tag_name: str, name of tag to check, without name of
                              ImageStream as prefix
        :param scheduled: bool, if True, importPolicy.scheduled will be
                                set to True in ImageStreamTag
        :return: bool, whether or not modifications were performed
        """
        img_stream_tag_file = os.path.join(self.os_conf.get_build_json_store(),
                                           'image_stream_tag.json')
        tag_template = json.load(open(img_stream_tag_file))
        return self.os.ensure_image_stream_tag(stream, tag_name, tag_template,
                                               scheduled)

    @osbsapi
    def get_image_stream(self, stream_id):
        return self.os.get_image_stream(stream_id)

    @osbsapi
    def create_image_stream(self, name, docker_image_repository,
                            insecure_registry=False):
        """
        Create an ImageStream object

        Raises exception on error

        :param name: str, name of ImageStream
        :param docker_image_repository: str, pull spec for docker image
               repository
        :param insecure_registry: bool, whether plain HTTP should be used
        :return: response
        """
        img_stream_file = os.path.join(self.os_conf.get_build_json_store(), 'image_stream.json')
        stream = json.load(open(img_stream_file))
        stream['metadata']['name'] = name
        stream['spec']['dockerImageRepository'] = docker_image_repository
        if insecure_registry:
            stream['metadata'].setdefault('annotations', {})
            insecure_annotation = 'openshift.io/image.insecureRepository'
            stream['metadata']['annotations'][insecure_annotation] = 'true'

        return self.os.create_image_stream(json.dumps(stream))

    def _load_quota_json(self, quota_name=None):
        quota_file = os.path.join(self.os_conf.get_build_json_store(),
                                  'pause_quota.json')
        with open(quota_file) as fp:
            quota_json = json.load(fp)

        if quota_name:
            quota_json['metadata']['name'] = quota_name

        return quota_json['metadata']['name'], quota_json

    @osbsapi
    def pause_builds(self, quota_name=None):
        # First, set quota so 0 pods are allowed to be running
        quota_name, quota_json = self._load_quota_json(quota_name)
        self.os.create_resource_quota(quota_name, quota_json)

        # Now wait for running builds to finish
        while True:
            field_selector = ','.join(['status=%s' % status.capitalize()
                                       for status in BUILD_RUNNING_STATES])
            builds = self.list_builds(field_selector)

            # Double check builds are actually in running state.
            running_builds = [build for build in builds if build.is_running()]

            if not running_builds:
                break

            name = running_builds[0].get_build_name()
            logger.info("waiting for build to finish: %s", name)
            self.wait_for_build_to_finish(name)

    @osbsapi
    def resume_builds(self, quota_name=None):
        quota_name, _ = self._load_quota_json(quota_name)
        self.os.delete_resource_quota(quota_name)

    # implements subset of OpenShift's export logic in pkg/cmd/cli/cmd/exporter.go
    @staticmethod
    def _prepare_resource(resource):
        utils.graceful_chain_del(resource, 'metadata', 'resourceVersion')

    @osbsapi
    def dump_resource(self, resource_type):
        return self.os.dump_resource(resource_type).json()

    @osbsapi
    def restore_resource(self, resource_type, resources, continue_on_error=False):
        nfailed = 0
        for r in resources["items"]:
            name = utils.graceful_chain_get(r, 'metadata', 'name') or '(no name)'
            logger.debug("restoring %s/%s", resource_type, name)
            try:
                self._prepare_resource(r)
                self.os.restore_resource(resource_type, r)
            except Exception:
                if continue_on_error:
                    logger.exception("failed to restore %s/%s", resource_type, name)
                    nfailed += 1
                else:
                    raise

        if continue_on_error:
            ntotal = len(resources["items"])
            logger.info("restored %s/%s %s", ntotal - nfailed, ntotal, resource_type)

    @osbsapi
    def get_compression_extension(self):
        """
        Find the filename extension for the 'docker save' output, which
        may or may not be compressed.

        Raises OsbsValidationException if the extension cannot be
        determined due to a configuration error.

        :returns: str including leading dot, or else None if no compression
        """

        build_request = BuildRequest(build_json_store=self.os_conf.get_build_json_store())
        inner = build_request.inner_template
        postbuild_plugins = inner.get('postbuild_plugins', [])
        for plugin in postbuild_plugins:
            if plugin.get('name') == 'compress':
                args = plugin.get('args', {})
                method = args.get('method', 'gzip')
                if method == 'gzip':
                    return '.gz'
                elif method == 'lzma':
                    return '.xz'
                raise OsbsValidationException("unknown compression method '%s'"
                                              % method)

        return None

    @osbsapi
    def list_resource_quotas(self):
        return self.os.list_resource_quotas().json()

    @osbsapi
    def get_resource_quota(self, quota_name):
        return self.os.get_resource_quota(quota_name).json()

    @osbsapi
    def can_orchestrate(self):
        return self.build_conf.get_can_orchestrate()

    @osbsapi
    def create_config_map(self, name, data):
        """
        Create an ConfigMap object on the server

        Raises exception on error

        :param name: str, name of configMap
        :param data: dict, dictionary of data to be stored
        :returns: ConfigMapResponse containing the ConfigMap with name and data
        """
        config_data_file = os.path.join(self.os_conf.get_build_json_store(), 'config_map.json')
        config_data = json.load(open(config_data_file))
        config_data['metadata']['name'] = name
        data_dict = {}
        for key, value in data.items():
            data_dict[key] = json.dumps(value)
        config_data['data'] = data_dict

        response = self.os.create_config_map(config_data)
        config_map_response = ConfigMapResponse(response.json())
        return config_map_response

    @osbsapi
    def get_config_map(self, name):
        """
        Get a ConfigMap object from the server

        Raises exception on error

        :param name: str, name of configMap to get from the server
        :returns: ConfigMapResponse containing the ConfigMap with the requested name
        """
        response = self.os.get_config_map(name)
        config_map_response = ConfigMapResponse(response.json())
        return config_map_response

    @osbsapi
    def delete_config_map(self, name):
        """
        Delete a ConfigMap object from the server

        Raises exception on error

        :param name: str, name of configMap to delete from the server
        :returns: True on success
        """
        response = self.os.delete_config_map(name)
        return response

    @contextmanager
    def retries_disabled(self):
        """
        Context manager to disable retries on requests
        :returns: OSBS object
        """
        self.os.retries_enabled = False
        yield
        self.os.retries_enabled = True
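
A short end-to-end sketch of how the class above is typically driven (the repository URL, user, platform list, and the assumption that can_orchestrate is enabled are illustrative, not taken from the listing):

osbs_api = OSBS(os_conf, build_conf)  # Configuration objects as in the earlier constructor examples
build = osbs_api.create_orchestrator_build(git_uri="https://example.com/repo.git",
                                           git_ref="master",
                                           git_branch="master",
                                           user="builder",
                                           platforms=["x86_64"])
for entry in osbs_api.get_orchestrator_build_logs(build.get_build_name(), follow=True):
    print("[%s] %s" % (entry.platform, entry.line))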
Example #9
class OSBS(object):
    """
    Note: all API methods return osbs.http.Response object. This is, due to historical
    reasons, untrue for list_builds and get_user, which return list of BuildResponse objects
    and dict respectively.
    """

    _GIT_LABEL_KEYS = ('git-repo-name', 'git-branch')

    @osbsapi
    def __init__(self, openshift_configuration, build_configuration):
        """ """
        self.os_conf = openshift_configuration
        self.build_conf = build_configuration
        self.os = Openshift(
            openshift_api_url=self.os_conf.get_openshift_api_uri(),
            openshift_api_version=self.os_conf.get_openshift_api_version(),
            openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
            k8s_api_url=self.os_conf.get_k8s_api_uri(),
            verbose=self.os_conf.get_verbosity(),
            username=self.os_conf.get_username(),
            password=self.os_conf.get_password(),
            use_kerberos=self.os_conf.get_use_kerberos(),
            client_cert=self.os_conf.get_client_cert(),
            client_key=self.os_conf.get_client_key(),
            kerberos_keytab=self.os_conf.get_kerberos_keytab(),
            kerberos_principal=self.os_conf.get_kerberos_principal(),
            kerberos_ccache=self.os_conf.get_kerberos_ccache(),
            use_auth=self.os_conf.get_use_auth(),
            verify_ssl=self.os_conf.get_verify_ssl(),
            token=self.os_conf.get_oauth2_token(),
            namespace=self.os_conf.get_namespace())
        self._bm = None

    @osbsapi
    def list_builds(self, field_selector=None, koji_task_id=None):
        """
        List builds with matching fields

        :param field_selector: str, field selector for Builds
        :param koji_task_id: str, only list builds for Koji Task ID
        :return: BuildResponse list
        """

        response = self.os.list_builds(field_selector=field_selector,
                                       koji_task_id=koji_task_id)
        serialized_response = response.json()
        build_list = []
        for build in serialized_response["items"]:
            build_list.append(BuildResponse(build))
        return build_list

    def watch_builds(self, field_selector=None):
        kwargs = {}
        if field_selector is not None:
            kwargs['fieldSelector'] = field_selector

        for changetype, obj in self.os.watch_resource("builds", **kwargs):
            yield changetype, obj

    @osbsapi
    def get_build(self, build_id):
        response = self.os.get_build(build_id)
        build_response = BuildResponse(response.json())
        return build_response

    @osbsapi
    def cancel_build(self, build_id):
        response = self.os.cancel_build(build_id)
        build_response = BuildResponse(response.json())
        return build_response

    @osbsapi
    def get_pod_for_build(self, build_id):
        """
        :return: PodResponse object for pod relating to the build
        """
        pods = self.os.list_pods(label='openshift.io/build.name=%s' % build_id)
        serialized_response = pods.json()
        pod_list = [PodResponse(pod) for pod in serialized_response["items"]]
        if not pod_list:
            raise OsbsException("No pod for build")
        elif len(pod_list) != 1:
            raise OsbsException("Only one pod expected but %d returned" %
                                len(pod_list))
        return pod_list[0]

    @osbsapi
    def get_build_request(self,
                          build_type=None,
                          inner_template=None,
                          outer_template=None,
                          customize_conf=None):
        """
        return instance of BuildRequest

        :param build_type: str, unused
        :param inner_template: str, name of inner template for BuildRequest
        :param outer_template: str, name of outer template for BuildRequest
        :param customize_conf: str, name of customization config for BuildRequest
        :return: instance of BuildRequest
        """
        if build_type is not None:
            warnings.warn(
                "build types are deprecated, do not use the build_type argument"
            )

        build_request = BuildRequest(
            build_json_store=self.os_conf.get_build_json_store(),
            inner_template=inner_template,
            outer_template=outer_template,
            customize_conf=customize_conf)

        # Apply configured resource limits.
        cpu_limit = self.build_conf.get_cpu_limit()
        memory_limit = self.build_conf.get_memory_limit()
        storage_limit = self.build_conf.get_storage_limit()
        if (cpu_limit is not None or memory_limit is not None
                or storage_limit is not None):
            build_request.set_resource_limits(cpu=cpu_limit,
                                              memory=memory_limit,
                                              storage=storage_limit)

        return build_request

    @osbsapi
    def create_build_from_buildrequest(self, build_request):
        """
        render provided build_request and submit build from it

        :param build_request: instance of build.build_request.BuildRequest
        :return: instance of build.build_response.BuildResponse
        """
        build_request.set_openshift_required_version(
            self.os_conf.get_openshift_required_version())
        build = build_request.render()
        response = self.os.create_build(json.dumps(build))
        build_response = BuildResponse(response.json())
        return build_response

    def _get_running_builds_for_build_config(self, build_config_id):
        all_builds_for_bc = self.os.list_builds(
            build_config_id=build_config_id).json()['items']
        running = []
        for b in all_builds_for_bc:
            br = BuildResponse(b)
            if br.is_pending() or br.is_running():
                running.append(br)
        return running

    def _panic_msg_for_more_running_builds(self, build_config_name, builds):
        # this should never happen, but if it does, we want to know all the builds
        #  that were running at the time
        builds = ', '.join(
            ['%s: %s' % (b.get_build_name(), b.status) for b in builds])
        msg = 'Multiple builds for %s running, can\'t proceed: %s' % \
            (build_config_name, builds)
        return msg

    def _verify_labels_match(self, new_build_config, existing_build_config):
        new_labels = new_build_config['metadata']['labels']
        existing_labels = existing_build_config['metadata']['labels']

        for key in self._GIT_LABEL_KEYS:
            new_label_value = new_labels.get(key)
            existing_label_value = existing_labels.get(key)

            if (existing_label_value
                    and existing_label_value != new_label_value):

                msg = ('Git labels collide with existing build config "%s". '
                       'Existing labels: %r, '
                       'New labels: %r ') % (
                           existing_build_config['metadata']['name'],
                           existing_labels, new_labels)
                raise OsbsValidationException(msg)

    def _get_existing_build_config(self, build_config):
        """
        Uses the given build config to find an existing matching build config.
        Build configs are a match if:
        - metadata.name are equal
        OR
        - metadata.labels.git-repo-name AND metadata.labels.git-branch are equal
        """

        git_labels = [(key, build_config['metadata']['labels'][key])
                      for key in self._GIT_LABEL_KEYS]
        name = build_config['metadata']['name']

        queries = (
            (self.os.get_build_config_by_labels, git_labels),
            (self.os.get_build_config, name),
        )

        existing_bc = None
        for func, arg in queries:
            try:
                existing_bc = func(arg)
                # build config found
                break
            except OsbsException as exc:
                # doesn't exist
                logger.info('Build config NOT found via %s: %s', func.__name__,
                            str(exc))
                continue

        return existing_bc

    def _verify_no_running_builds(self, build_config_name):
        running_builds = self._get_running_builds_for_build_config(
            build_config_name)
        rb_len = len(running_builds)

        if rb_len > 0:
            if rb_len == 1:
                rb = running_builds[0]
                msg = 'Build %s for %s in state %s, can\'t proceed.' % \
                    (rb.get_build_name(), build_config_name, rb.status)
            else:
                msg = self._panic_msg_for_more_running_builds(
                    build_config_name, running_builds)
            raise OsbsException(msg)

    def _create_scratch_build(self, build_request):
        logger.debug(build_request)
        build_json = build_request.render()
        build_json['kind'] = 'Build'
        build_json['spec']['serviceAccount'] = 'builder'
        build_json['metadata']['labels']['scratch'] = 'true'

        if build_request.low_priority_node_selector:
            build_json['spec'][
                'nodeSelector'] = build_request.low_priority_node_selector

        builder_img = build_json['spec']['strategy']['customStrategy']['from']
        kind = builder_img['kind']
        if kind == 'ImageStreamTag':
            # Only BuildConfigs get to specify an ImageStreamTag. When
            # creating Builds directly we need to specify a
            # DockerImage.
            response = self.get_image_stream_tag(builder_img['name'])
            ref = response.json()['image']['dockerImageReference']
            builder_img['kind'] = 'DockerImage'
            builder_img['name'] = ref

        output_image_name = build_json['spec']['output']['to']['name']
        # Reuse random string and timestamp values.
        build_config_name = 'scratch-%s-%s' % tuple(
            output_image_name.rsplit('-', 2)[-2:])
        logger.debug('starting scratch build %s', build_config_name)
        build_json['metadata']['name'] = build_config_name
        return BuildResponse(self.os.create_build(build_json).json())

    def _get_image_stream_info_for_build_request(self, build_request):
        """Return ImageStream, and ImageStreamTag name for base_image of build_request

        If build_request is not auto instantiated, objects are not fetched
        and None, None is returned.
        """
        image_stream = None
        image_stream_tag_name = None

        if build_request.has_ist_trigger():
            image_stream_tag_id = build_request.spec.trigger_imagestreamtag.value
            image_stream_id, image_stream_tag_name = image_stream_tag_id.split(
                ':')

            try:
                image_stream = self.get_image_stream(image_stream_id).json()
            except OsbsResponseException as x:
                if x.status_code != 404:
                    raise

            if image_stream:
                try:
                    self.get_image_stream_tag(image_stream_tag_id).json()
                except OsbsResponseException as x:
                    if x.status_code != 404:
                        raise

        return image_stream, image_stream_tag_name

    def _create_build_config_and_build(self, build_request):
        build_json = build_request.render()
        api_version = build_json['apiVersion']
        if api_version != self.os_conf.get_openshift_api_version():
            raise OsbsValidationException(
                'BuildConfig template has incorrect apiVersion (%s)' %
                api_version)

        build_config_name = build_json['metadata']['name']
        logger.debug('build config to be named "%s"', build_config_name)
        existing_bc = self._get_existing_build_config(build_json)

        image_stream, image_stream_tag_name = \
            self._get_image_stream_info_for_build_request(build_request)

        # Remove triggers in BuildConfig to avoid accidental
        # auto instance of Build. If defined, triggers will
        # be added to BuildConfig after ImageStreamTag object
        # is properly configured.
        triggers = build_json['spec'].pop('triggers', None)

        if existing_bc:
            self._verify_labels_match(build_json, existing_bc)
            # Existing build config may have a different name if matched by
            # git-repo-name and git-branch labels. Continue using existing
            # build config name.
            build_config_name = existing_bc['metadata']['name']
            logger.debug('existing build config name to be used "%s"',
                         build_config_name)
            self._verify_no_running_builds(build_config_name)

            utils.buildconfig_update(existing_bc, build_json)
            # Reset name change that may have occurred during
            # update above, since renaming is not supported.
            existing_bc['metadata']['name'] = build_config_name
            logger.debug('build config for %s already exists, updating...',
                         build_config_name)

            self.os.update_build_config(build_config_name,
                                        json.dumps(existing_bc))
            if triggers:
                # Retrieve updated version to pick up lastVersion
                existing_bc = self._get_existing_build_config(existing_bc)

        else:
            logger.debug('build config for %s doesn\'t exist, creating...',
                         build_config_name)
            existing_bc = self.os.create_build_config(
                json.dumps(build_json)).json()

        if image_stream:
            changed_ist = self.ensure_image_stream_tag(image_stream,
                                                       image_stream_tag_name,
                                                       scheduled=True)
            logger.debug('Changed parent ImageStreamTag? %s', changed_ist)

        if triggers:
            existing_bc['spec']['triggers'] = triggers
            self.os.update_build_config(build_config_name,
                                        json.dumps(existing_bc))

        if image_stream and triggers:
            prev_version = existing_bc['status']['lastVersion']
            build_id = self.os.wait_for_new_build_config_instance(
                build_config_name, prev_version)
            build = BuildResponse(self.os.get_build(build_id).json())
        else:
            response = self.os.start_build(build_config_name)
            build = BuildResponse(response.json())

        return build

    def _do_create_prod_build(
            self,
            git_uri,
            git_ref,
            git_branch,  # may be None
            user,
            component=None,
            target=None,
            architecture=None,
            yum_repourls=None,
            koji_task_id=None,
            scratch=None,
            platform=None,
            platforms=None,
            release=None,
            inner_template=None,
            outer_template=None,
            customize_conf=None,
            arrangement_version=None,
            **kwargs):
        df_parser = utils.get_df_parser(git_uri,
                                        git_ref,
                                        git_branch=git_branch)
        build_request = self.get_build_request(inner_template=inner_template,
                                               outer_template=outer_template,
                                               customize_conf=customize_conf)
        labels = utils.Labels(df_parser.labels)

        required_missing = False
        req_labels = {}
        # The version label isn't used here, but it is a required label in the
        # Dockerfile and is also required by atomic reactor; if we don't catch
        # the error here, it will fail later in atomic reactor.
        for label in [
                utils.Labels.LABEL_TYPE_NAME,
                utils.Labels.LABEL_TYPE_COMPONENT,
                utils.Labels.LABEL_TYPE_VERSION
        ]:
            try:
                _, req_labels[label] = labels.get_name_and_value(label)
            except KeyError:
                required_missing = True
                logger.error("required label missing from Dockerfile : %s",
                             labels.get_name(label))

        if required_missing:
            raise OsbsValidationException(
                "required label missing from Dockerfile")

        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            git_branch=git_branch,
            user=user,
            component=req_labels[utils.Labels.LABEL_TYPE_COMPONENT],
            build_image=self.build_conf.get_build_image(),
            build_imagestream=self.build_conf.get_build_imagestream(),
            base_image=df_parser.baseimage,
            name_label=req_labels[utils.Labels.LABEL_TYPE_NAME],
            registry_uris=self.build_conf.get_registry_uris(),
            registry_secrets=self.build_conf.get_registry_secrets(),
            source_registry_uri=self.build_conf.get_source_registry_uri(),
            registry_api_versions=self.build_conf.get_registry_api_versions(),
            openshift_uri=self.os_conf.get_openshift_base_uri(),
            builder_openshift_url=self.os_conf.get_builder_openshift_url(),
            kojiroot=self.build_conf.get_kojiroot(),
            kojihub=self.build_conf.get_kojihub(),
            sources_command=self.build_conf.get_sources_command(),
            koji_target=target,
            koji_certs_secret=self.build_conf.get_koji_certs_secret(),
            koji_task_id=koji_task_id,
            koji_use_kerberos=self.build_conf.get_koji_use_kerberos(),
            koji_kerberos_keytab=self.build_conf.get_koji_kerberos_keytab(),
            koji_kerberos_principal=self.build_conf.get_koji_kerberos_principal(),
            architecture=architecture,
            platform=platform,
            platforms=platforms,
            release=release,
            vendor=self.build_conf.get_vendor(),
            build_host=self.build_conf.get_build_host(),
            authoritative_registry=self.build_conf.get_authoritative_registry(),
            distribution_scope=self.build_conf.get_distribution_scope(),
            yum_repourls=yum_repourls,
            proxy=self.build_conf.get_proxy(),
            pulp_secret=self.build_conf.get_pulp_secret(),
            smtp_host=self.build_conf.get_smtp_host(),
            smtp_from=self.build_conf.get_smtp_from(),
            smtp_additional_addresses=self.build_conf.get_smtp_additional_addresses(),
            smtp_error_addresses=self.build_conf.get_smtp_error_addresses(),
            smtp_email_domain=self.build_conf.get_smtp_email_domain(),
            smtp_to_submitter=self.build_conf.get_smtp_to_submitter(),
            smtp_to_pkgowner=self.build_conf.get_smtp_to_pkgowner(),
            use_auth=self.build_conf.get_builder_use_auth(),
            pulp_registry=self.os_conf.get_pulp_registry(),
            nfs_server_path=self.os_conf.get_nfs_server_path(),
            nfs_dest_dir=self.build_conf.get_nfs_destination_dir(),
            builder_build_json_dir=self.build_conf.get_builder_build_json_store(),
            scratch=self.build_conf.get_scratch(scratch),
            reactor_config_secret=self.build_conf.get_reactor_config_secret(),
            client_config_secret=self.build_conf.get_client_config_secret(),
            token_secrets=self.build_conf.get_token_secrets(),
            arrangement_version=arrangement_version,
            info_url_format=self.build_conf.get_info_url_format(),
            artifacts_allowed_domains=self.build_conf.get_artifacts_allowed_domains(),
            low_priority_node_selector=self.build_conf.get_low_priority_node_selector())
        build_request.set_openshift_required_version(
            self.os_conf.get_openshift_required_version())
        if build_request.scratch:
            response = self._create_scratch_build(build_request)
        else:
            response = self._create_build_config_and_build(build_request)
        logger.debug(response.json)
        return response

    @osbsapi
    def create_prod_build(self, *args, **kwargs):
        """
        Create a production build

        :param git_uri: str, URI of git repository
        :param git_ref: str, reference to commit
        :param git_branch: str, branch name (may be None)
        :param user: str, user name
        :param component: str, not used anymore
        :param target: str, koji target
        :param architecture: str, build architecture
        :param yum_repourls: list, URLs for yum repos
        :param koji_task_id: int, koji task ID requesting build
        :param scratch: bool, this is a scratch build
        :param platform: str, the platform name
        :param platforms: list<str>, the name of each platform
        :param release: str, the release value to use
        :param inner_template: str, name of inner template for BuildRequest
        :param outer_template: str, name of outer template for BuildRequest
        :param customize_conf: str, name of customization config for BuildRequest
        :param arrangement_version: int, numbered arrangement of plugins for orchestration workflow
        :return: BuildResponse instance
        """
        return self._do_create_prod_build(*args, **kwargs)

    @osbsapi
    def create_prod_with_secret_build(self,
                                      git_uri,
                                      git_ref,
                                      git_branch,
                                      user,
                                      component=None,
                                      target=None,
                                      architecture=None,
                                      yum_repourls=None,
                                      **kwargs):
        warnings.warn(
            "create_prod_with_secret_build is deprecated, please use create_build"
        )
        return self._do_create_prod_build(git_uri,
                                          git_ref,
                                          git_branch,
                                          user,
                                          component,
                                          target,
                                          architecture,
                                          yum_repourls=yum_repourls,
                                          **kwargs)

    @osbsapi
    def create_prod_without_koji_build(self,
                                       git_uri,
                                       git_ref,
                                       git_branch,
                                       user,
                                       component=None,
                                       architecture=None,
                                       yum_repourls=None,
                                       **kwargs):
        warnings.warn(
            "create_prod_without_koji_build is deprecated, please use create_build"
        )
        return self._do_create_prod_build(git_uri,
                                          git_ref,
                                          git_branch,
                                          user,
                                          component,
                                          None,
                                          architecture,
                                          yum_repourls=yum_repourls,
                                          **kwargs)

    @osbsapi
    def create_simple_build(self, **kwargs):
        warnings.warn(
            "simple builds are deprecated, please use the create_build method")
        return self._do_create_prod_build(**kwargs)

    @osbsapi
    def create_build(self, **kwargs):
        """
        take input args, create build request and submit the build

        :param kwargs: keyword args for build
        :return: instance of BuildResponse
        """
        kwargs.setdefault('git_branch', None)
        return self._do_create_prod_build(**kwargs)

    @osbsapi
    def create_worker_build(self, **kwargs):
        """
        Create a worker build

        Pass through method to create_prod_build with the following
        modifications:
            - platform param is required
            - release param is required
            - arrangement_version param is required, which is used to
              select which worker_inner:n.json template to use
            - inner template set to worker_inner:n.json if not set
            - outer template set to worker.json if not set
            - customize configuration set to worker_customize.json if not set

        :return: BuildResponse instance
        """
        missing = set()
        for required in ('platform', 'release', 'arrangement_version'):
            if not kwargs.get(required):
                missing.add(required)

        if missing:
            raise ValueError("Worker build missing required parameters: %s" %
                             missing)

        arrangement_version = kwargs.pop('arrangement_version')
        kwargs.setdefault(
            'inner_template',
            WORKER_INNER_TEMPLATE.format(
                arrangement_version=arrangement_version))
        kwargs.setdefault('outer_template', WORKER_OUTER_TEMPLATE)
        kwargs.setdefault('customize_conf', WORKER_CUSTOMIZE_CONF)

        kwargs.setdefault('git_branch', None)
        try:
            return self._do_create_prod_build(**kwargs)
        except IOError as ex:
            if os.path.basename(ex.filename) == kwargs['inner_template']:
                raise OsbsValidationException(
                    "invalid arrangement_version %s" % arrangement_version)

            raise

    @osbsapi
    def create_orchestrator_build(self, **kwargs):
        """
        Create an orchestrator build

        Pass through method to create_prod_build with the following
        modifications:
            - platforms param is required
            - arrangement_version param may be used to select which
              orchestrator_inner:n.json template to use
            - inner template set to orchestrator_inner:n.json if not set
            - outer template set to orchestrator.json if not set
            - customize configuration set to orchestrator_customize.json if not set

        :return: BuildResponse instance
        """
        if not kwargs.get('platforms'):
            raise ValueError('Orchestrator build requires platforms param')

        if not self.can_orchestrate():
            raise OsbsValidationException("can't create orchestrate build "
                                          "when can_orchestrate isn't enabled")

        arrangement_version = kwargs.setdefault(
            'arrangement_version', self.build_conf.get_arrangement_version())

        kwargs.setdefault(
            'inner_template',
            ORCHESTRATOR_INNER_TEMPLATE.format(
                arrangement_version=arrangement_version))
        kwargs.setdefault('outer_template', ORCHESTRATOR_OUTER_TEMPLATE)
        kwargs.setdefault('customize_conf', ORCHESTRATOR_CUSTOMIZE_CONF)

        kwargs.setdefault('git_branch', None)
        try:
            return self._do_create_prod_build(**kwargs)
        except IOError as ex:
            if os.path.basename(ex.filename) == kwargs['inner_template']:
                raise OsbsValidationException(
                    "invalid arrangement_version %s" % arrangement_version)

            raise

    @osbsapi
    def get_build_logs(self,
                       build_id,
                       follow=False,
                       build_json=None,
                       wait_if_missing=False):
        """
        provide logs from build

        :param build_id: str
        :param follow: bool, fetch logs as they come?
        :param build_json: dict, to save one get-build query
        :param wait_if_missing: bool, if build doesn't exist, wait
        :return: None, str or iterator
        """
        return self.os.logs(build_id,
                            follow=follow,
                            build_json=build_json,
                            wait_if_missing=wait_if_missing)

    @osbsapi
    def get_docker_build_logs(self,
                              build_id,
                              decode_logs=True,
                              build_json=None):
        """
        get logs provided by "docker build"

        :param build_id: str
        :param decode_logs: bool, docker by default outputs logs in a simple json structure:
            { "stream": "line" }
            if this arg is set to True, the logs are decoded into a human readable form
        :param build_json: dict, to save one get-build query
        :return: str
        """
        if not build_json:
            build = self.os.get_build(build_id)
            build_response = BuildResponse(build.json())
        else:
            build_response = BuildResponse(build_json)

        if build_response.is_finished():
            logs = build_response.get_logs(decode_logs=decode_logs)
            return logs
        logger.warning("build haven't finished yet")

    @osbsapi
    def wait_for_build_to_finish(self, build_id):
        response = self.os.wait_for_build_to_finish(build_id)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def wait_for_build_to_get_scheduled(self, build_id):
        response = self.os.wait_for_build_to_get_scheduled(build_id)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def update_labels_on_build(self, build_id, labels):
        response = self.os.update_labels_on_build(build_id, labels)
        return response

    @osbsapi
    def set_labels_on_build(self, build_id, labels):
        response = self.os.set_labels_on_build(build_id, labels)
        return response

    @osbsapi
    def update_labels_on_build_config(self, build_config_id, labels):
        response = self.os.update_labels_on_build_config(
            build_config_id, labels)
        return response

    @osbsapi
    def set_labels_on_build_config(self, build_config_id, labels):
        response = self.os.set_labels_on_build_config(build_config_id, labels)
        return response

    @osbsapi
    def update_annotations_on_build(self, build_id, annotations):
        return self.os.update_annotations_on_build(build_id, annotations)

    @osbsapi
    def set_annotations_on_build(self, build_id, annotations):
        return self.os.set_annotations_on_build(build_id, annotations)

    @osbsapi
    def import_image(self, name):
        """
        Import image tags from a Docker registry into an ImageStream

        :return: bool, whether new tags were imported
        """

        return self.os.import_image(name)

    @osbsapi
    def get_token(self):
        if self.os.use_kerberos:
            return self.os.get_oauth_token()
        else:
            if self.os.token:
                return self.os.token

            raise OsbsValidationException("no token stored for %s" %
                                          self.os_conf.conf_section)

    @osbsapi
    def login(self, token=None, username=None, password=None):
        if self.os.use_kerberos:
            raise OsbsValidationException(
                "can't use login when using kerberos")

        if not token:
            if username:
                self.os.username = username
            else:
                try:
                    self.os.username = raw_input("Username: ")
                except NameError:
                    self.os.username = input("Username: ")

            if password:
                self.os.password = password
            else:
                self.os.password = getpass.getpass()
            self.os.use_kerberos = False
            token = self.os.get_oauth_token()

        self.os.token = token
        try:
            self.os.get_user()
        except OsbsResponseException as ex:
            if ex.status_code == 401:  # unauthorized: token rejected
                raise OsbsValidationException("token is not valid")
            raise

        token_file = utils.get_instance_token_file_name(
            self.os_conf.conf_section)
        token_file_dir = os.path.dirname(token_file)

        if not os.path.exists(token_file_dir):
            os.makedirs(token_file_dir)

        # Inspired by http://stackoverflow.com/a/15015748/5998718
        # For security, remove file with potentially elevated mode
        if os.path.exists(token_file):
            os.remove(token_file)

        # Open file descriptor
        fdesc = os.open(token_file, os.O_WRONLY | os.O_CREAT | os.O_EXCL,
                        stat.S_IRUSR | stat.S_IWUSR)

        with os.fdopen(fdesc, 'w') as f:
            f.write(token + '\n')

    @osbsapi
    def get_user(self, username="~"):
        return self.os.get_user(username).json()

    @osbsapi
    def get_serviceaccount_tokens(self, username="~"):
        return self.os.get_serviceaccount_tokens(username)

    @osbsapi
    def get_image_stream_tag(self, tag_id):
        return self.os.get_image_stream_tag(tag_id)

    @osbsapi
    def ensure_image_stream_tag(self, stream, tag_name, scheduled=False):
        """Ensures the tag is monitored in ImageStream

        :param stream: dict, ImageStream object
        :param tag_name: str, name of tag to check, without name of
                              ImageStream as prefix
        :param scheduled: bool, if True, importPolicy.scheduled will be
                                set to True in ImageStreamTag
        :return: bool, whether or not modifications were performed
        """
        img_stream_tag_file = os.path.join(self.os_conf.get_build_json_store(),
                                           'image_stream_tag.json')
        with open(img_stream_tag_file) as f:
            tag_template = json.load(f)
        return self.os.ensure_image_stream_tag(stream, tag_name, tag_template,
                                               scheduled)

    @osbsapi
    def get_image_stream(self, stream_id):
        return self.os.get_image_stream(stream_id)

    @osbsapi
    def create_image_stream(self,
                            name,
                            docker_image_repository,
                            insecure_registry=False):
        """
        Create an ImageStream object

        Raises exception on error

        :param name: str, name of ImageStream
        :param docker_image_repository: str, pull spec for docker image
               repository
        :param insecure_registry: bool, whether plain HTTP should be used
        :return: response
        """
        img_stream_file = os.path.join(self.os_conf.get_build_json_store(),
                                       'image_stream.json')
        with open(img_stream_file) as f:
            stream = json.load(f)
        stream['metadata']['name'] = name
        stream['spec']['dockerImageRepository'] = docker_image_repository
        if insecure_registry:
            stream['metadata'].setdefault('annotations', {})
            insecure_annotation = 'openshift.io/image.insecureRepository'
            stream['metadata']['annotations'][insecure_annotation] = 'true'

        return self.os.create_image_stream(json.dumps(stream))

    def _load_quota_json(self, quota_name=None):
        quota_file = os.path.join(self.os_conf.get_build_json_store(),
                                  'pause_quota.json')
        with open(quota_file) as fp:
            quota_json = json.load(fp)

        if quota_name:
            quota_json['metadata']['name'] = quota_name

        return quota_json['metadata']['name'], quota_json

    @osbsapi
    def pause_builds(self, quota_name=None):
        # First, set quota so 0 pods are allowed to be running
        quota_name, quota_json = self._load_quota_json(quota_name)
        self.os.create_resource_quota(quota_name, quota_json)

        # Now wait for running builds to finish
        while True:
            field_selector = ','.join([
                'status=%s' % status.capitalize()
                for status in BUILD_RUNNING_STATES
            ])
            builds = self.list_builds(field_selector)

            # Double check builds are actually in running state.
            running_builds = [build for build in builds if build.is_running()]

            if not running_builds:
                break

            name = running_builds[0].get_build_name()
            logger.info("waiting for build to finish: %s", name)
            self.wait_for_build_to_finish(name)

    @osbsapi
    def resume_builds(self, quota_name=None):
        quota_name, _ = self._load_quota_json(quota_name)
        self.os.delete_resource_quota(quota_name)

    # implements subset of OpenShift's export logic in pkg/cmd/cli/cmd/exporter.go
    @staticmethod
    def _prepare_resource(resource):
        utils.graceful_chain_del(resource, 'metadata', 'resourceVersion')

    @osbsapi
    def dump_resource(self, resource_type):
        return self.os.dump_resource(resource_type).json()

    @osbsapi
    def restore_resource(self,
                         resource_type,
                         resources,
                         continue_on_error=False):
        nfailed = 0
        for r in resources["items"]:
            name = utils.graceful_chain_get(r, 'metadata',
                                            'name') or '(no name)'
            logger.debug("restoring %s/%s", resource_type, name)
            try:
                self._prepare_resource(r)
                self.os.restore_resource(resource_type, r)
            except Exception:
                if continue_on_error:
                    logger.exception("failed to restore %s/%s", resource_type,
                                     name)
                    nfailed += 1
                else:
                    raise

        if continue_on_error:
            ntotal = len(resources["items"])
            logger.info("restored %s/%s %s", ntotal - nfailed, ntotal,
                        resource_type)

    @osbsapi
    def get_compression_extension(self):
        """
        Find the filename extension for the 'docker save' output, which
        may or may not be compressed.

        Raises OsbsValidationException if the extension cannot be
        determined due to a configuration error.

        :returns: str including leading dot, or else None if no compression
        """

        build_request = BuildRequest(
            build_json_store=self.os_conf.get_build_json_store())
        inner = build_request.inner_template
        postbuild_plugins = inner.get('postbuild_plugins', [])
        for plugin in postbuild_plugins:
            if plugin.get('name') == 'compress':
                args = plugin.get('args', {})
                method = args.get('method', 'gzip')
                if method == 'gzip':
                    return '.gz'
                elif method == 'lzma':
                    return '.xz'
                raise OsbsValidationException(
                    "unknown compression method '%s'" % method)

        return None

    @osbsapi
    def list_resource_quotas(self):
        return self.os.list_resource_quotas().json()

    @osbsapi
    def get_resource_quota(self, quota_name):
        return self.os.get_resource_quota(quota_name).json()

    @osbsapi
    def can_orchestrate(self):
        return self.build_conf.get_can_orchestrate()
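
A hypothetical end-to-end sketch of driving the API in Example #9: submit an orchestrator build, wait for it to finish, then fetch its logs. The repository, user and platform values are illustrative assumptions, and can_orchestrate must be enabled in the build configuration:

# Hypothetical sketch; assumes `osbs` is an OSBS instance constructed as in
# Example #9 and that can_orchestrate is enabled in its build configuration.
build = osbs.create_orchestrator_build(
    git_uri='git://example.com/containers/foo.git',  # assumed repository
    git_ref='master',
    git_branch='master',
    user='jdoe',
    platforms=['x86_64', 'ppc64le'],  # required for orchestrator builds
)
build = osbs.wait_for_build_to_finish(build.get_build_name())
print(osbs.get_build_logs(build.get_build_name()))
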
Example #10
File: api.py Project: midnightercz/osbs
class OSBS(object):
    """ """
    @osbsapi
    def __init__(self, openshift_configuration, build_configuration):
        """ """
        self.os_conf = openshift_configuration
        self.build_conf = build_configuration
        self.os = Openshift(openshift_api_url=self.os_conf.get_openshift_api_uri(),
                            openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
                            verbose=self.os_conf.get_verbosity(),
                            username=self.os_conf.get_username(),
                            password=self.os_conf.get_password(),
                            use_kerberos=self.os_conf.get_use_kerberos(),
                            use_auth=self.os_conf.get_use_auth(),
                            verify_ssl=self.os_conf.get_verify_ssl())
        self._bm = None

    # some calls might not need build manager so let's make it lazy
    @property
    def bm(self):
        if self._bm is None:
            self._bm = BuildManager(build_json_store=self.os_conf.get_build_json_store())
        return self._bm

    @osbsapi
    def list_builds(self, namespace=DEFAULT_NAMESPACE):
        # FIXME: return list of BuildResponse objects
        builds = self.os.list_builds(namespace=namespace).json()
        return builds

    @osbsapi
    def get_build(self, build_id, namespace=DEFAULT_NAMESPACE):
        response = self.os.get_build(build_id, namespace=namespace)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def get_build_request(self, build_type=None):
        """
        return instance of BuildRequest according to specified build type

        :param build_type: str, name of build type
        :return: instance of BuildRequest
        """
        build_type = build_type or self.build_conf.get_build_type()
        return self.bm.get_build_request_by_type(build_type=build_type)

    @osbsapi
    def create_build_from_buildrequest(self, build_request, namespace=DEFAULT_NAMESPACE):
        """
        render provided build_request and submit build from it

        :param build_request: instance of build.build_request.BuildRequest
        :param namespace: str, place/context where the build should be executed
        :return: instance of build.build_response.BuildResponse
        """
        build = build_request.render()
        response = self.os.create_build(json.dumps(build.build_json), namespace=namespace)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def create_prod_build(self, git_uri, git_ref, user, component, target, architecture, yum_repourls=None,
                          namespace=DEFAULT_NAMESPACE, **kwargs):
        build_request = self.get_build_request(PROD_BUILD_TYPE)
        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            user=user,
            component=component,
            registry_uri=self.build_conf.get_registry_uri(),
            openshift_uri=self.os_conf.get_openshift_api_uri(),
            kojiroot=self.build_conf.get_kojiroot(),
            kojihub=self.build_conf.get_kojihub(),
            sources_command=self.build_conf.get_sources_command(),
            koji_target=target,
            architecture=architecture,
            vendor=self.build_conf.get_vendor(),
            build_host=self.build_conf.get_build_host(),
            authoritative_registry=self.build_conf.get_authoritative_registry(),
            yum_repourls=yum_repourls,
            metadata_plugin_use_auth=self.build_conf.get_metadata_plugin_use_auth(),
        )
        build_json = build_request.render()
        response = self.os.create_build(json.dumps(build_json), namespace=namespace)
        build_response = BuildResponse(response)
        logger.debug(build_response.json)
        return build_response

    @osbsapi
    def create_prod_without_koji_build(self, git_uri, git_ref, user, component, architecture, yum_repourls=None,
                                       namespace=DEFAULT_NAMESPACE, **kwargs):
        build_request = self.get_build_request(PROD_WITHOUT_KOJI_BUILD_TYPE)
        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            user=user,
            component=component,
            registry_uri=self.build_conf.get_registry_uri(),
            openshift_uri=self.os_conf.get_openshift_api_uri(),
            sources_command=self.build_conf.get_sources_command(),
            architecture=architecture,
            vendor=self.build_conf.get_vendor(),
            build_host=self.build_conf.get_build_host(),
            authoritative_registry=self.build_conf.get_authoritative_registry(),
            yum_repourls=yum_repourls,
            metadata_plugin_use_auth=self.build_conf.get_metadata_plugin_use_auth(),
        )
        build_json = build_request.render()
        response = self.os.create_build(json.dumps(build_json), namespace=namespace)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def create_simple_build(self, git_uri, git_ref, user, component, yum_repourls=None,
                            namespace=DEFAULT_NAMESPACE, **kwargs):
        build_request = self.get_build_request(SIMPLE_BUILD_TYPE)
        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            user=user,
            component=component,
            registry_uri=self.build_conf.get_registry_uri(),
            openshift_uri=self.os_conf.get_openshift_api_uri(),
            yum_repourls=yum_repourls,
        )
        build_json = build_request.render()
        response = self.os.create_build(json.dumps(build_json), namespace=namespace)
        build_response = BuildResponse(response)
        logger.debug(build_response.json)
        return build_response

    @osbsapi
    def create_build(self, namespace=DEFAULT_NAMESPACE, **kwargs):
        """
        take input args, create build request from provided build type and submit the build

        :param namespace: str, place/context where the build should be executed
        :param kwargs: keyword args for build
        :return: instance of BuildResponse
        """
        build_type = self.build_conf.get_build_type()
        if build_type == PROD_BUILD_TYPE:
            return self.create_prod_build(namespace=namespace, **kwargs)
        elif build_type == SIMPLE_BUILD_TYPE:
            return self.create_simple_build(namespace=namespace, **kwargs)
        elif build_type == PROD_WITHOUT_KOJI_BUILD_TYPE:
            return self.create_prod_without_koji_build(namespace=namespace, **kwargs)
        else:
            raise OsbsException("Unknown build type: '%s'" % build_type)

    @osbsapi
    def get_build_logs(self, build_id, follow=False, namespace=DEFAULT_NAMESPACE):
        if follow:
            return self.os.logs(build_id, follow, namespace=namespace)
        try:
            build = self.os.get_build(build_id, namespace=namespace)
        except OsbsResponseException as ex:
            if ex.status_code != 404:
                raise
        else:
            build_response = BuildResponse(build)
            logs = None
            if build_response.is_finished():
                metadata = build_response.json.get("metadata", {})
                md = metadata.get("annotations", metadata.get("labels", {}))
                logs = md.get("logs", None)

            if logs:
                return logs

            return self.os.logs(build_id, follow=False, namespace=namespace)

    @osbsapi
    def wait_for_build_to_finish(self, build_id, namespace=DEFAULT_NAMESPACE):
        # FIXME: since OS returns whole build json in watch we could return
        #        instance of BuildResponse here
        response = self.os.wait_for_build_to_finish(build_id, namespace=namespace)
        return response

    @osbsapi
    def set_labels_on_build(self, build_id, labels, namespace=DEFAULT_NAMESPACE):
        response = self.os.set_labels_on_build(build_id, labels, namespace=namespace)
        return response

    @osbsapi
    def get_token(self):
        return self.os.get_oauth_token()

    @osbsapi
    def get_user(self, username="~"):
        return self.os.get_user(username).json()
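
A hypothetical sketch against the older API in Example #10, where create_build() dispatches on the build type taken from configuration and every call accepts a namespace. All argument values are illustrative assumptions:

# Hypothetical sketch; assumes `osbs` is an OSBS instance constructed as in
# Example #10.
response = osbs.create_build(
    git_uri='git://example.com/containers/foo.git',  # assumed repository
    git_ref='master',
    user='jdoe',
    component='foo',
    target='f24-candidate',  # koji target, used by the prod build type
    architecture='x86_64',
    namespace='default',
)
print(response.json)  # the Build object as returned by OpenShift
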
Example #11
File: api.py Project: lcarva/osbs-client
class OSBS(object):
    """
    Note: all API methods return osbs.http.Response object. This is, due to historical
    reasons, untrue for list_builds and get_user, which return list of BuildResponse objects
    and dict respectively.
    """

    _GIT_LABEL_KEYS = ('git-repo-name', 'git-branch')

    @osbsapi
    def __init__(self, openshift_configuration, build_configuration):
        """ """
        self.os_conf = openshift_configuration
        self.build_conf = build_configuration
        self.os = Openshift(
            openshift_api_url=self.os_conf.get_openshift_api_uri(),
            openshift_api_version=self.os_conf.get_openshift_api_version(),
            openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
            k8s_api_url=self.os_conf.get_k8s_api_uri(),
            verbose=self.os_conf.get_verbosity(),
            username=self.os_conf.get_username(),
            password=self.os_conf.get_password(),
            use_kerberos=self.os_conf.get_use_kerberos(),
            client_cert=self.os_conf.get_client_cert(),
            client_key=self.os_conf.get_client_key(),
            kerberos_keytab=self.os_conf.get_kerberos_keytab(),
            kerberos_principal=self.os_conf.get_kerberos_principal(),
            kerberos_ccache=self.os_conf.get_kerberos_ccache(),
            use_auth=self.os_conf.get_use_auth(),
            verify_ssl=self.os_conf.get_verify_ssl(),
            token=self.os_conf.get_oauth2_token(),
            namespace=self.os_conf.get_namespace())
        self._bm = None

    @osbsapi
    def list_builds(self, field_selector=None, koji_task_id=None):
        """
        List builds with matching fields

        :param field_selector: str, field selector for Builds
        :param koji_task_id: str, only list builds for Koji Task ID
        :return: BuildResponse list
        """

        response = self.os.list_builds(field_selector=field_selector,
                                       koji_task_id=koji_task_id)
        serialized_response = response.json()
        build_list = []
        for build in serialized_response["items"]:
            build_list.append(BuildResponse(build))
        return build_list

    def watch_builds(self, field_selector=None):
        kwargs = {}
        if field_selector is not None:
            kwargs['fieldSelector'] = field_selector

        for changetype, obj in self.os.watch_resource("builds", **kwargs):
            yield changetype, obj

    @osbsapi
    def get_build(self, build_id):
        response = self.os.get_build(build_id)
        build_response = BuildResponse(response.json())
        return build_response

    @osbsapi
    def cancel_build(self, build_id):
        response = self.os.cancel_build(build_id)
        build_response = BuildResponse(response.json())
        return build_response

    @osbsapi
    def get_pod_for_build(self, build_id):
        """
        :return: PodResponse object for pod relating to the build
        """
        pods = self.os.list_pods(label='openshift.io/build.name=%s' % build_id)
        serialized_response = pods.json()
        pod_list = [PodResponse(pod) for pod in serialized_response["items"]]
        if not pod_list:
            raise OsbsException("No pod for build")
        elif len(pod_list) != 1:
            raise OsbsException("Only one pod expected but %d returned" %
                                len(pod_list))
        return pod_list[0]

    @osbsapi
    def get_build_request(self, build_type=None):
        """
        return instance of BuildRequest

        :param build_type: str, unused
        :return: instance of BuildRequest
        """
        if build_type is not None:
            warnings.warn(
                "build types are deprecated, do not use the build_type argument"
            )

        build_request = BuildRequest(
            build_json_store=self.os_conf.get_build_json_store())

        # Apply configured resource limits.
        cpu_limit = self.build_conf.get_cpu_limit()
        memory_limit = self.build_conf.get_memory_limit()
        storage_limit = self.build_conf.get_storage_limit()
        if (cpu_limit is not None or memory_limit is not None
                or storage_limit is not None):
            build_request.set_resource_limits(cpu=cpu_limit,
                                              memory=memory_limit,
                                              storage=storage_limit)

        return build_request

    @osbsapi
    def create_build_from_buildrequest(self, build_request):
        """
        render provided build_request and submit build from it

        :param build_request: instance of build.build_request.BuildRequest
        :return: instance of build.build_response.BuildResponse
        """
        build_request.set_openshift_required_version(
            self.os_conf.get_openshift_required_version())
        build = build_request.render()
        response = self.os.create_build(json.dumps(build))
        build_response = BuildResponse(response.json())
        return build_response

    def _get_running_builds_for_build_config(self, build_config_id):
        all_builds_for_bc = self.os.list_builds(
            build_config_id=build_config_id).json()['items']
        running = []
        for b in all_builds_for_bc:
            br = BuildResponse(b)
            if br.is_pending() or br.is_running():
                running.append(br)
        return running

    def _panic_msg_for_more_running_builds(self, build_config_name, builds):
        # this should never happen, but if it does, we want to know all the builds
        #  that were running at the time
        builds = ', '.join(
            ['%s: %s' % (b.get_build_name(), b.status) for b in builds])
        msg = 'Multiple builds for %s running, can\'t proceed: %s' % \
            (build_config_name, builds)
        return msg

    def _verify_labels_match(self, new_build_config, existing_build_config):
        new_labels = new_build_config['metadata']['labels']
        existing_labels = existing_build_config['metadata']['labels']

        for key in self._GIT_LABEL_KEYS:
            new_label_value = new_labels.get(key)
            existing_label_value = existing_labels.get(key)

            if (existing_label_value
                    and existing_label_value != new_label_value):

                msg = ('Git labels collide with existing build config "%s". '
                       'Existing labels: %r, '
                       'New labels: %r ') % (
                           existing_build_config['metadata']['name'],
                           existing_labels, new_labels)
                raise OsbsValidationException(msg)

    def _get_existing_build_config(self, build_config):
        """
        Uses the given build config to find an existing matching build config.
        Build configs are a match if:
        - metadata.name are equal
        OR
        - metadata.labels.git-repo-name AND metadata.labels.git-branch are equal
        """

        git_labels = [(key, build_config['metadata']['labels'][key])
                      for key in self._GIT_LABEL_KEYS]
        name = build_config['metadata']['name']

        queries = (
            (self.os.get_build_config_by_labels, git_labels),
            (self.os.get_build_config, name),
        )

        existing_bc = None
        for func, arg in queries:
            try:
                existing_bc = func(arg)
                # build config found
                break
            except OsbsException as exc:
                # doesn't exist
                logger.info('Build config NOT found via %s: %s', func.__name__,
                            str(exc))
                continue

        return existing_bc

    def _verify_no_running_builds(self, build_config_name):
        running_builds = self._get_running_builds_for_build_config(
            build_config_name)
        rb_len = len(running_builds)

        if rb_len > 0:
            if rb_len == 1:
                rb = running_builds[0]
                msg = 'Build %s for %s in state %s, can\'t proceed.' % \
                    (rb.get_build_name(), build_config_name, rb.status)
            else:
                msg = self._panic_msg_for_more_running_builds(
                    build_config_name, running_builds)
            raise OsbsException(msg)

    def _create_build_config_and_build(self, build_request):
        build = None

        build_json = build_request.render()
        api_version = build_json['apiVersion']
        if api_version != self.os_conf.get_openshift_api_version():
            raise OsbsValidationException(
                'BuildConfig template has incorrect apiVersion (%s)' %
                api_version)

        build_config_name = build_json['metadata']['name']
        logger.debug('build config to be named "%s"', build_config_name)
        existing_bc = self._get_existing_build_config(build_json)

        if existing_bc is not None:
            self._verify_labels_match(build_json, existing_bc)
            # Existing build config may have a different name if matched by
            # git-repo-name and git-branch labels. Continue using existing
            # build config name.
            build_config_name = existing_bc['metadata']['name']
            logger.debug('existing build config name to be used "%s"',
                         build_config_name)
            self._verify_no_running_builds(build_config_name)

            utils.buildconfig_update(existing_bc, build_json)
            # Reset name change that may have occurred during
            # update above, since renaming is not supported.
            existing_bc['metadata']['name'] = build_config_name
            logger.debug('build config for %s already exists, updating...',
                         build_config_name)
            self.os.update_build_config(build_config_name,
                                        json.dumps(existing_bc))

        else:
            # if it doesn't exist, then create it
            logger.debug('build config for %s doesn\'t exist, creating...',
                         build_config_name)
            bc = self.os.create_build_config(json.dumps(build_json)).json()
            # if there's an "ImageChangeTrigger" on the BuildConfig and "From" is of type
            #  "ImageStreamTag", the build will be scheduled automatically
            #  see https://github.com/projectatomic/osbs-client/issues/205
            if build_request.is_auto_instantiated():
                prev_version = bc['status']['lastVersion']
                build_id = self.os.wait_for_new_build_config_instance(
                    build_config_name, prev_version)
                build = BuildResponse(self.os.get_build(build_id).json())

        if build is None:
            response = self.os.start_build(build_config_name)
            build = BuildResponse(response.json())
        return build

    @osbsapi
    def create_prod_build(
            self,
            git_uri,
            git_ref,
            git_branch,  # may be None
            user,
            component,
            target,  # may be None
            architecture=None,
            yum_repourls=None,
            koji_task_id=None,
            scratch=False,
            labels=None,
            **kwargs):
        """
        Create a production build

        :param git_uri: str, URI of git repository
        :param git_ref: str, reference to commit
        :param git_branch: str, branch name (may be None)
        :param user: str, user name
        :param component: str, component name
        :param target: str, koji target (may be None)
        :param architecture: str, build architecture
        :param yum_repourls: list, URLs for yum repos
        :param koji_task_id: int, koji task ID requesting build
        :param scratch: bool, whether this is a scratch build
        :param labels: dict, overrides for Dockerfile labels
        :return: BuildResponse instance
        """
        df_parser = utils.get_df_parser(git_uri,
                                        git_ref,
                                        git_branch=git_branch)
        build_request = self.get_build_request()
        name_label_name = 'Name'
        try:
            name_label = df_parser.labels[name_label_name]
        except KeyError:
            raise OsbsValidationException(
                "required label '{name}' missing "
                "from Dockerfile".format(name=name_label_name))

        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            git_branch=git_branch,
            user=user,
            component=component,
            build_image=self.build_conf.get_build_image(),
            build_imagestream=self.build_conf.get_build_imagestream(),
            base_image=df_parser.baseimage,
            name_label=name_label,
            registry_uris=self.build_conf.get_registry_uris(),
            registry_secrets=self.build_conf.get_registry_secrets(),
            source_registry_uri=self.build_conf.get_source_registry_uri(),
            registry_api_versions=self.build_conf.get_registry_api_versions(),
            openshift_uri=self.os_conf.get_openshift_base_uri(),
            builder_openshift_url=self.os_conf.get_builder_openshift_url(),
            kojiroot=self.build_conf.get_kojiroot(),
            kojihub=self.build_conf.get_kojihub(),
            sources_command=self.build_conf.get_sources_command(),
            koji_target=target,
            koji_certs_secret=self.build_conf.get_koji_certs_secret(),
            koji_task_id=koji_task_id,
            architecture=architecture,
            vendor=self.build_conf.get_vendor(),
            build_host=self.build_conf.get_build_host(),
            authoritative_registry=self.build_conf.get_authoritative_registry(),
            distribution_scope=self.build_conf.get_distribution_scope(),
            yum_repourls=yum_repourls,
            proxy=self.build_conf.get_proxy(),
            pulp_secret=self.build_conf.get_pulp_secret(),
            pdc_secret=self.build_conf.get_pdc_secret(),
            pdc_url=self.build_conf.get_pdc_url(),
            smtp_uri=self.build_conf.get_smtp_uri(),
            use_auth=self.build_conf.get_builder_use_auth(),
            pulp_registry=self.os_conf.get_pulp_registry(),
            nfs_server_path=self.os_conf.get_nfs_server_path(),
            nfs_dest_dir=self.build_conf.get_nfs_destination_dir(),
            git_push_url=self.build_conf.get_git_push_url(),
            git_push_username=self.build_conf.get_git_push_username(),
            builder_build_json_dir=self.build_conf.get_builder_build_json_store(),
            labels=labels,
            scratch=scratch,
        )
        build_request.set_openshift_required_version(
            self.os_conf.get_openshift_required_version())
        response = self._create_build_config_and_build(build_request)
        logger.debug(response.json)
        return response

    @osbsapi
    def create_prod_with_secret_build(self,
                                      git_uri,
                                      git_ref,
                                      git_branch,
                                      user,
                                      component,
                                      target,
                                      architecture=None,
                                      yum_repourls=None,
                                      **kwargs):
        return self.create_prod_build(git_uri,
                                      git_ref,
                                      git_branch,
                                      user,
                                      component,
                                      target,
                                      architecture,
                                      yum_repourls=yum_repourls,
                                      **kwargs)

    @osbsapi
    def create_prod_without_koji_build(self,
                                       git_uri,
                                       git_ref,
                                       git_branch,
                                       user,
                                       component,
                                       architecture=None,
                                       yum_repourls=None,
                                       **kwargs):
        return self.create_prod_build(git_uri,
                                      git_ref,
                                      git_branch,
                                      user,
                                      component,
                                      None,
                                      architecture,
                                      yum_repourls=yum_repourls,
                                      **kwargs)

    @osbsapi
    def create_simple_build(self, **kwargs):
        warnings.warn(
            "simple builds are deprecated, please use the create_build method")
        return self.create_prod_build(**kwargs)

    @osbsapi
    def create_build(self, **kwargs):
        """
        take input args, create build request and submit the build

        :param kwargs: keyword args for build
        :return: instance of BuildRequest
        """
        kwargs.setdefault('git_branch', None)
        kwargs.setdefault('target', None)
        return self.create_prod_build(**kwargs)

    @osbsapi
    def get_build_logs(self,
                       build_id,
                       follow=False,
                       build_json=None,
                       wait_if_missing=False):
        """
        provide logs from build

        :param build_id: str
        :param follow: bool, fetch logs as they come?
        :param build_json: dict, to save one get-build query
        :param wait_if_missing: bool, if build doesn't exist, wait
        :return: None, str or iterator
        """
        return self.os.logs(build_id,
                            follow=follow,
                            build_json=build_json,
                            wait_if_missing=wait_if_missing)

    @osbsapi
    def get_docker_build_logs(self,
                              build_id,
                              decode_logs=True,
                              build_json=None):
        """
        get logs provided by "docker build"

        :param build_id: str
        :param decode_logs: bool, by default docker outputs logs in a simple JSON structure:
            { "stream": "line" }
            if this arg is set to True, the logs are decoded into human-readable form
        :param build_json: dict, to save one get-build query
        :return: str
        """
        if not build_json:
            build = self.os.get_build(build_id)
            build_response = BuildResponse(build.json())
        else:
            build_response = BuildResponse(build_json)

        if build_response.is_finished():
            logs = build_response.get_logs(decode_logs=decode_logs)
            return logs
        logger.warning("build haven't finished yet")

    @osbsapi
    def wait_for_build_to_finish(self, build_id):
        response = self.os.wait_for_build_to_finish(build_id)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def wait_for_build_to_get_scheduled(self, build_id):
        response = self.os.wait_for_build_to_get_scheduled(build_id)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def update_labels_on_build(self, build_id, labels):
        response = self.os.update_labels_on_build(build_id, labels)
        return response

    @osbsapi
    def set_labels_on_build(self, build_id, labels):
        response = self.os.set_labels_on_build(build_id, labels)
        return response

    @osbsapi
    def update_labels_on_build_config(self, build_config_id, labels):
        response = self.os.update_labels_on_build_config(
            build_config_id, labels)
        return response

    @osbsapi
    def set_labels_on_build_config(self, build_config_id, labels):
        response = self.os.set_labels_on_build_config(build_config_id, labels)
        return response

    @osbsapi
    def update_annotations_on_build(self, build_id, annotations):
        return self.os.update_annotations_on_build(build_id, annotations)

    @osbsapi
    def set_annotations_on_build(self, build_id, annotations):
        return self.os.set_annotations_on_build(build_id, annotations)

    @osbsapi
    def import_image(self, name):
        """
        Import image tags from a Docker registry into an ImageStream

        :return: bool, whether new tags were imported
        """

        return self.os.import_image(name)

    @osbsapi
    def get_token(self):
        return self.os.get_oauth_token()

    @osbsapi
    def get_user(self, username="******"):
        return self.os.get_user(username).json()

    @osbsapi
    def get_serviceaccount_tokens(self, username="******"):
        return self.os.get_serviceaccount_tokens(username)

    @osbsapi
    def get_image_stream(self, stream_id):
        return self.os.get_image_stream(stream_id)

    @osbsapi
    def create_image_stream(self,
                            name,
                            docker_image_repository,
                            insecure_registry=False):
        """
        Create an ImageStream object

        Raises exception on error

        :param name: str, name of ImageStream
        :param docker_image_repository: str, pull spec for docker image
               repository
        :param insecure_registry: bool, whether plain HTTP should be used
        :return: response
        """
        img_stream_file = os.path.join(self.os_conf.get_build_json_store(),
                                       'image_stream.json')
        with open(img_stream_file) as fp:
            stream = json.load(fp)
        stream['metadata']['name'] = name
        stream['spec']['dockerImageRepository'] = docker_image_repository
        if insecure_registry:
            stream['metadata'].setdefault('annotations', {})
            insecure_annotation = 'openshift.io/image.insecureRepository'
            stream['metadata']['annotations'][insecure_annotation] = 'true'

        return self.os.create_image_stream(json.dumps(stream))

    def _load_quota_json(self, quota_name=None):
        quota_file = os.path.join(self.os_conf.get_build_json_store(),
                                  'pause_quota.json')
        with open(quota_file) as fp:
            quota_json = json.load(fp)

        if quota_name:
            quota_json['metadata']['name'] = quota_name

        return quota_json['metadata']['name'], quota_json

    @osbsapi
    def pause_builds(self, quota_name=None):
        # First, set quota so 0 pods are allowed to be running
        quota_name, quota_json = self._load_quota_json(quota_name)
        self.os.create_resource_quota(quota_name, quota_json)

        # Now wait for running builds to finish
        while True:
            field_selector = ','.join([
                'status=%s' % status.capitalize()
                for status in BUILD_RUNNING_STATES
            ])
            builds = self.list_builds(field_selector)

            # Double check builds are actually in running state.
            running_builds = [build for build in builds if build.is_running()]

            if not running_builds:
                break

            name = running_builds[0].get_build_name()
            logger.info("waiting for build to finish: %s", name)
            self.wait_for_build_to_finish(name)
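
The field selector assembled in the loop above is a comma-joined list of status clauses. A hedged expansion, assuming BUILD_RUNNING_STATES contains the lower-case states "new", "pending" and "running":

# Hypothetical expansion of the field selector used by pause_builds.
field_selector = ",".join("status=%s" % status.capitalize()
                          for status in ("new", "pending", "running"))
# -> "status=New,status=Pending,status=Running"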

    @osbsapi
    def resume_builds(self, quota_name=None):
        quota_name, _ = self._load_quota_json(quota_name)
        self.os.delete_resource_quota(quota_name)

    # implements a subset of OpenShift's export logic in pkg/cmd/cli/cmd/exporter.go
    @staticmethod
    def _prepare_resource(resource):
        utils.graceful_chain_del(resource, 'metadata', 'resourceVersion')

    @osbsapi
    def dump_resource(self, resource_type):
        return self.os.dump_resource(resource_type).json()

    @osbsapi
    def restore_resource(self,
                         resource_type,
                         resources,
                         continue_on_error=False):
        nfailed = 0
        for r in resources["items"]:
            name = utils.graceful_chain_get(r, 'metadata',
                                            'name') or '(no name)'
            logger.debug("restoring %s/%s", resource_type, name)
            try:
                self._prepare_resource(r)
                self.os.restore_resource(resource_type, r)
            except Exception:
                if continue_on_error:
                    logger.exception("failed to restore %s/%s", resource_type,
                                     name)
                    nfailed += 1
                else:
                    raise

        if continue_on_error:
            ntotal = len(resources["items"])
            logger.info("restored %s/%s %s", ntotal - nfailed, ntotal,
                        resource_type)

    @osbsapi
    def get_compression_extension(self):
        """
        Find the filename extension for the 'docker save' output, which
        may or may not be compressed.

        Raises OsbsValidationException if the extension cannot be
        determined due to a configuration error.

        :returns: str including leading dot, or else None if no compression
        """

        build_request = BuildRequest(
            build_json_store=self.os_conf.get_build_json_store())
        inner = build_request.inner_template
        postbuild_plugins = inner.get('postbuild_plugins', [])
        for plugin in postbuild_plugins:
            if plugin.get('name') == 'compress':
                args = plugin.get('args', {})
                method = args.get('method', 'gzip')
                if method == 'gzip':
                    return '.gz'
                elif method == 'lzma':
                    return '.xz'
                raise OsbsValidationException(
                    "unknown compression method '%s'" % method)

        return None
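
The loop above looks for a postbuild plugin named "compress" in the inner template and maps its "method" argument to a file extension. A minimal sketch of the fragment it inspects; the values here are assumptions, not taken from any real template:

# With an entry like this in the inner template, get_compression_extension
# returns '.gz'; method 'lzma' would yield '.xz', and any other value raises
# OsbsValidationException.
inner_template_fragment = {
    "postbuild_plugins": [
        {"name": "compress", "args": {"method": "gzip"}},
    ],
}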

    @osbsapi
    def list_resource_quotas(self):
        return self.os.list_resource_quotas().json()

    @osbsapi
    def get_resource_quota(self, quota_name):
        return self.os.get_resource_quota(quota_name).json()
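
The methods in this example are normally driven through an OSBS instance built from two Configuration objects. The sketch below is not taken from the project: the configuration path, repository URL and all argument values are assumptions, and only methods shown above are used.

# Minimal usage sketch with made-up values: construct the API object,
# submit a production build via create_build, wait for it to finish and
# print its logs.
from osbs.api import OSBS
from osbs.conf import Configuration

os_conf = Configuration(conf_file="/etc/osbs.conf", conf_section="default")
build_conf = Configuration(conf_file="/etc/osbs.conf", conf_section="default")
osbs = OSBS(os_conf, build_conf)

build = osbs.create_build(git_uri="https://example.com/repo.git",
                          git_ref="master",
                          git_branch="master",
                          user="builder",
                          component="example-component")
build = osbs.wait_for_build_to_finish(build.get_build_name())
print(osbs.get_build_logs(build.get_build_name()))
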
Example #12
File: api.py  Project: ekuric/osbs-client
class OSBS(object):
    """
    Note: all API methods return an osbs.http.Response object. For historical
    reasons this is untrue for list_builds and get_user, which return a list of
    BuildResponse objects and a dict, respectively.
    """
    @osbsapi
    def __init__(self, openshift_configuration, build_configuration):
        """ """
        self.os_conf = openshift_configuration
        self.build_conf = build_configuration
        self.os = Openshift(openshift_api_url=self.os_conf.get_openshift_api_uri(),
                            openshift_api_version=self.os_conf.get_openshift_api_version(),
                            openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
                            k8s_api_url=self.os_conf.get_k8s_api_uri(),
                            verbose=self.os_conf.get_verbosity(),
                            username=self.os_conf.get_username(),
                            password=self.os_conf.get_password(),
                            use_kerberos=self.os_conf.get_use_kerberos(),
                            client_cert=self.os_conf.get_client_cert(),
                            client_key=self.os_conf.get_client_key(),
                            kerberos_keytab=self.os_conf.get_kerberos_keytab(),
                            kerberos_principal=self.os_conf.get_kerberos_principal(),
                            kerberos_ccache=self.os_conf.get_kerberos_ccache(),
                            use_auth=self.os_conf.get_use_auth(),
                            verify_ssl=self.os_conf.get_verify_ssl(),
                            namespace=self.os_conf.get_namespace())
        self._bm = None

    # some calls might not need build manager so let's make it lazy
    @property
    def bm(self):
        if self._bm is None:
            self._bm = BuildManager(build_json_store=self.os_conf.get_build_json_store())
        return self._bm

    @osbsapi
    def list_builds(self, field_selector=None):
        """
        List builds with matching fields

        :param field_selector: str, field selector for Builds
        :return: BuildResponse list
        """

        response = self.os.list_builds(field_selector=field_selector)
        serialized_response = response.json()
        build_list = []
        for build in serialized_response["items"]:
            build_list.append(BuildResponse(build))
        return build_list

    @osbsapi
    def get_build(self, build_id):
        response = self.os.get_build(build_id)
        build_response = BuildResponse(response.json())
        return build_response

    @osbsapi
    def cancel_build(self, build_id):
        response = self.os.cancel_build(build_id)
        build_response = BuildResponse(response.json())
        return build_response

    @osbsapi
    def get_pod_for_build(self, build_id):
        """
        :return: PodResponse object for pod relating to the build
        """
        pods = self.os.list_pods(label='openshift.io/build.name=%s' % build_id)
        serialized_response = pods.json()
        pod_list = [PodResponse(pod) for pod in serialized_response["items"]]
        if not pod_list:
            raise OsbsException("No pod for build")
        elif len(pod_list) != 1:
            raise OsbsException("Only one pod expected but %d returned",
                                len(pod_list))
        return pod_list[0]

    @osbsapi
    def get_build_request(self, build_type=None):
        """
        return instance of BuildRequest according to specified build type

        :param build_type: str, name of build type
        :return: instance of BuildRequest
        """
        build_type = build_type or self.build_conf.get_build_type()
        build_request = self.bm.get_build_request_by_type(build_type=build_type)

        # Apply configured resource limits.
        cpu_limit = self.build_conf.get_cpu_limit()
        memory_limit = self.build_conf.get_memory_limit()
        storage_limit = self.build_conf.get_storage_limit()
        if (cpu_limit is not None or
                memory_limit is not None or
                storage_limit is not None):
            build_request.set_resource_limits(cpu=cpu_limit,
                                              memory=memory_limit,
                                              storage=storage_limit)

        return build_request

    @osbsapi
    def create_build_from_buildrequest(self, build_request):
        """
        render provided build_request and submit build from it

        :param build_request: instance of build.build_request.BuildRequest
        :return: instance of build.build_response.BuildResponse
        """
        build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
        build = build_request.render()
        response = self.os.create_build(json.dumps(build))
        build_response = BuildResponse(response.json())
        return build_response

    def _get_running_builds_for_build_config(self, build_config_id):
        all_builds_for_bc = self.os.list_builds(build_config_id=build_config_id).json()['items']
        running = []
        for b in all_builds_for_bc:
            br = BuildResponse(b)
            if br.is_pending() or br.is_running():
                running.append(br)
        return running

    def _panic_msg_for_more_running_builds(self, build_config_name, builds):
        # this should never happen, but if it does, we want to know all the builds
        #  that were running at the time
        builds = ', '.join(['%s: %s' % (b.get_build_name(), b.status) for b in builds])
        msg = 'Multiple builds for %s running, can\'t proceed: %s' % \
            (build_config_name, builds)
        return msg

    def _create_build_config_and_build(self, build_request):
        # TODO: test this method more thoroughly
        build_json = build_request.render()
        api_version = build_json['apiVersion']
        if api_version != self.os_conf.get_openshift_api_version():
            raise OsbsValidationException("BuildConfig template has incorrect apiVersion (%s)" %
                                          api_version)

        build_config_name = build_json['metadata']['name']

        # check if a build already exists for this config; if so then raise
        running_builds = self._get_running_builds_for_build_config(build_config_name)
        rb_len = len(running_builds)
        if rb_len > 0:
            if rb_len == 1:
                rb = running_builds[0]
                msg = 'Build %s for %s in state %s, can\'t proceed.' % \
                    (rb.get_build_name(), build_config_name, rb.status)
            else:
                msg = self._panic_msg_for_more_running_builds(build_config_name, running_builds)
            raise OsbsException(msg)

        try:
            # see if there's already a build config
            existing_bc = self.os.get_build_config(build_config_name)
        except OsbsException:
            # doesn't exist
            existing_bc = None

        build = None
        if existing_bc is not None:
            utils.buildconfig_update(existing_bc, build_json)
            logger.debug('build config for %s already exists, updating...', build_config_name)
            self.os.update_build_config(build_config_name, json.dumps(existing_bc))
        else:
            # if it doesn't exist, then create it
            logger.debug('build config for %s doesn\'t exist, creating...', build_config_name)
            bc = self.os.create_build_config(json.dumps(build_json)).json()
            # if there's an "ImageChangeTrigger" on the BuildConfig and "From" is of type
            #  "ImageStreamTag", the build will be scheduled automatically
            #  see https://github.com/projectatomic/osbs-client/issues/205
            if build_request.is_auto_instantiated():
                prev_version = bc['status']['lastVersion']
                build_id = self.os.wait_for_new_build_config_instance(build_config_name,
                                                                      prev_version)
                build = BuildResponse(self.os.get_build(build_id).json())

        if build is None:
            response = self.os.start_build(build_config_name)
            build = BuildResponse(response.json())
        return build

    @osbsapi
    def create_prod_build(self, git_uri, git_ref,
                          git_branch,  # may be None
                          user, component,
                          target,      # may be None
                          architecture=None, yum_repourls=None,
                          build_image=None,
                          **kwargs):
        """
        Create a production build

        :param git_uri: str, URI of git repository
        :param git_ref: str, reference to commit
        :param git_branch: str, branch name (may be None)
        :param user: str, user name
        :param component: str, component name
        :param target: str, koji target (may be None)
        :param architecture: str, build architecture
        :param yum_repourls: list, URLs for yum repos
        :return: BuildResponse instance
        """
        df_parser = utils.get_df_parser(git_uri, git_ref, git_branch=git_branch)
        build_request = self.get_build_request(PROD_BUILD_TYPE)
        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            git_branch=git_branch,
            user=user,
            component=component,
            build_image=build_image,
            base_image=df_parser.baseimage,
            name_label=df_parser.labels['Name'],
            registry_uris=self.build_conf.get_registry_uris(),
            source_registry_uri=self.build_conf.get_source_registry_uri(),
            registry_api_versions=self.build_conf.get_registry_api_versions(),
            openshift_uri=self.os_conf.get_openshift_base_uri(),
            builder_openshift_url=self.os_conf.get_builder_openshift_url(),
            kojiroot=self.build_conf.get_kojiroot(),
            kojihub=self.build_conf.get_kojihub(),
            sources_command=self.build_conf.get_sources_command(),
            koji_target=target,
            architecture=architecture,
            vendor=self.build_conf.get_vendor(),
            build_host=self.build_conf.get_build_host(),
            authoritative_registry=self.build_conf.get_authoritative_registry(),
            distribution_scope=self.build_conf.get_distribution_scope(),
            yum_repourls=yum_repourls,
            pulp_secret=self.build_conf.get_pulp_secret(),
            pdc_secret=self.build_conf.get_pdc_secret(),
            pdc_url=self.build_conf.get_pdc_url(),
            smtp_uri=self.build_conf.get_smtp_uri(),
            use_auth=self.build_conf.get_builder_use_auth(),
            pulp_registry=self.os_conf.get_pulp_registry(),
            nfs_server_path=self.os_conf.get_nfs_server_path(),
            nfs_dest_dir=self.build_conf.get_nfs_destination_dir(),
            git_push_url=self.build_conf.get_git_push_url(),
            git_push_username=self.build_conf.get_git_push_username(),
            builder_build_json_dir=self.build_conf.get_builder_build_json_store(),
        )
        build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
        response = self._create_build_config_and_build(build_request)
        logger.debug(response.json)
        return response

    @osbsapi
    def create_prod_with_secret_build(self, git_uri, git_ref, git_branch, user, component,
                                      target, architecture=None, yum_repourls=None, **kwargs):
        return self.create_prod_build(git_uri, git_ref, git_branch, user, component, target,
                                      architecture, yum_repourls=yum_repourls, **kwargs)

    @osbsapi
    def create_prod_without_koji_build(self, git_uri, git_ref, git_branch, user, component,
                                       architecture=None, yum_repourls=None, **kwargs):
        return self.create_prod_build(git_uri, git_ref, git_branch, user, component, None,
                                      architecture, yum_repourls=yum_repourls, **kwargs)

    @osbsapi
    def create_simple_build(self, git_uri, git_ref, user, component, tag,
                            yum_repourls=None, build_image=None, **kwargs):
        build_request = self.get_build_request(SIMPLE_BUILD_TYPE)
        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            user=user,
            component=component,
            tag=tag,
            build_image=build_image,
            registry_uris=self.build_conf.get_registry_uris(),
            source_registry_uri=self.build_conf.get_source_registry_uri(),
            openshift_uri=self.os_conf.get_openshift_base_uri(),
            builder_openshift_url=self.os_conf.get_builder_openshift_url(),
            yum_repourls=yum_repourls,
            use_auth=self.build_conf.get_builder_use_auth(),
        )
        build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
        response = self._create_build_config_and_build(build_request)
        logger.debug(response.json)
        return response

    @osbsapi
    def create_build(self, **kwargs):
        """
        take input args, create build request from provided build type and submit the build

        :param kwargs: keyword args for build
        :return: instance of BuildRequest
        """
        build_type = self.build_conf.get_build_type()
        if build_type in (PROD_BUILD_TYPE,
                          PROD_WITHOUT_KOJI_BUILD_TYPE,
                          PROD_WITH_SECRET_BUILD_TYPE):
            kwargs.setdefault('git_branch', None)
            kwargs.setdefault('target', None)
            return self.create_prod_build(**kwargs)
        elif build_type == SIMPLE_BUILD_TYPE:
            return self.create_simple_build(**kwargs)
        elif build_type == PROD_WITH_SECRET_BUILD_TYPE:
            return self.create_prod_with_secret_build(**kwargs)
        else:
            raise OsbsException("Unknown build type: '%s'" % build_type)

    @osbsapi
    def get_build_logs(self, build_id, follow=False, build_json=None, wait_if_missing=False):
        """
        provide logs from build

        :param build_id: str
        :param follow: bool, fetch logs as they come?
        :param build_json: dict, to save one get-build query
        :param wait_if_missing: bool, if build doesn't exist, wait
        :return: None, str or iterator
        """
        return self.os.logs(build_id, follow=follow, build_json=build_json,
                            wait_if_missing=wait_if_missing)

    @osbsapi
    def get_docker_build_logs(self, build_id, decode_logs=True, build_json=None):
        """
        get logs provided by "docker build"

        :param build_id: str
        :param decode_logs: bool, by default docker outputs logs in a simple JSON structure:
            { "stream": "line" }
            if this arg is set to True, the logs are decoded into human-readable form
        :param build_json: dict, to save one get-build query
        :return: str
        """
        if not build_json:
            build = self.os.get_build(build_id)
            build_response = BuildResponse(build.json())
        else:
            build_response = BuildResponse(build_json)

        if build_response.is_finished():
            logs = build_response.get_logs(decode_logs=decode_logs)
            return logs
        logger.warning("build haven't finished yet")

    @osbsapi
    def wait_for_build_to_finish(self, build_id):
        response = self.os.wait_for_build_to_finish(build_id)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def wait_for_build_to_get_scheduled(self, build_id):
        response = self.os.wait_for_build_to_get_scheduled(build_id)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def update_labels_on_build(self, build_id, labels):
        response = self.os.update_labels_on_build(build_id, labels)
        return response

    @osbsapi
    def set_labels_on_build(self, build_id, labels):
        response = self.os.set_labels_on_build(build_id, labels)
        return response

    @osbsapi
    def update_labels_on_build_config(self, build_config_id, labels):
        response = self.os.update_labels_on_build_config(build_config_id, labels)
        return response

    @osbsapi
    def set_labels_on_build_config(self, build_config_id, labels):
        response = self.os.set_labels_on_build_config(build_config_id, labels)
        return response

    @osbsapi
    def update_annotations_on_build(self, build_id, annotations):
        return self.os.update_annotations_on_build(build_id, annotations)

    @osbsapi
    def set_annotations_on_build(self, build_id, annotations):
        return self.os.set_annotations_on_build(build_id, annotations)

    @osbsapi
    def import_image(self, name):
        """
        Import image tags from a Docker registry into an ImageStream

        :return: bool, whether new tags were imported
        """

        return self.os.import_image(name)

    @osbsapi
    def get_token(self):
        return self.os.get_oauth_token()

    @osbsapi
    def get_user(self, username="******"):
        return self.os.get_user(username).json()

    @osbsapi
    def get_image_stream(self, stream_id):
        return self.os.get_image_stream(stream_id)

    @osbsapi
    def create_image_stream(self, name, docker_image_repository,
                            insecure_registry=False):
        """
        Create an ImageStream object

        Raises exception on error

        :param name: str, name of ImageStream
        :param docker_image_repository: str, pull spec for docker image
               repository
        :param insecure_registry: bool, whether plain HTTP should be used
        :return: response
        """
        img_stream_file = os.path.join(self.os_conf.get_build_json_store(), 'image_stream.json')
        with open(img_stream_file) as fp:
            stream = json.load(fp)
        stream['metadata']['name'] = name
        stream['spec']['dockerImageRepository'] = docker_image_repository
        if insecure_registry:
            stream['metadata'].setdefault('annotations', {})
            insecure_annotation = 'openshift.io/image.insecureRepository'
            stream['metadata']['annotations'][insecure_annotation] = 'true'

        return self.os.create_image_stream(json.dumps(stream))

    def _load_quota_json(self, quota_name=None):
        quota_file = os.path.join(self.os_conf.get_build_json_store(),
                                  'pause_quota.json')
        with open(quota_file) as fp:
            quota_json = json.load(fp)

        if quota_name:
            quota_json['metadata']['name'] = quota_name

        return quota_json['metadata']['name'], quota_json

    @osbsapi
    def pause_builds(self, quota_name=None):
        # First, set quota so 0 pods are allowed to be running
        quota_name, quota_json = self._load_quota_json(quota_name)
        self.os.create_resource_quota(quota_name, quota_json)

        # Now wait for running builds to finish
        while True:
            builds = self.list_builds()
            running_builds = [build for build in builds if build.is_running()]
            if not running_builds:
                break

            name = running_builds[0].get_build_name()
            logger.info("waiting for build to finish: %s", name)
            self.wait_for_build_to_finish(name)

    @osbsapi
    def resume_builds(self, quota_name=None):
        quota_name, _ = self._load_quota_json(quota_name)
        self.os.delete_resource_quota(quota_name)

    # implements a subset of OpenShift's export logic in pkg/cmd/cli/cmd/exporter.go
    @staticmethod
    def _prepare_resource(resource_type, resource):
        utils.graceful_chain_del(resource, 'metadata', 'resourceVersion')

        if resource_type == 'buildconfigs':
            utils.graceful_chain_del(resource, 'status', 'lastVersion')

            triggers = utils.graceful_chain_get(resource, 'spec', 'triggers') or ()
            for t in triggers:
                utils.graceful_chain_del(t, 'imageChange', 'lastTriggeredImageID')

    @osbsapi
    def dump_resource(self, resource_type):
        return self.os.dump_resource(resource_type).json()

    @osbsapi
    def restore_resource(self, resource_type, resources, continue_on_error=False):
        nfailed = 0
        for r in resources["items"]:
            name = utils.graceful_chain_get(r, 'metadata', 'name') or '(no name)'
            logger.debug("restoring %s/%s", resource_type, name)
            try:
                self._prepare_resource(resource_type, r)
                self.os.restore_resource(resource_type, r)
            except Exception:
                if continue_on_error:
                    logger.exception("failed to restore %s/%s", resource_type, name)
                    nfailed += 1
                else:
                    raise

        if continue_on_error:
            ntotal = len(resources["items"])
            logger.info("restored %s/%s %s", ntotal - nfailed, ntotal, resource_type)

    @osbsapi
    def get_compression_extension(self):
        """
        Find the filename extension for the 'docker save' output, which
        may or may not be compressed.

        Raises OsbsValidationException if the extension cannot be
        determined due to a configuration error.

        :returns: str including leading dot, or else None if no compression
        """

        build_type = self.build_conf.get_build_type()
        build_request = self.bm.get_build_request_by_type(build_type=build_type)
        inner = build_request.inner_template
        postbuild_plugins = inner.get('postbuild_plugins', [])
        for plugin in postbuild_plugins:
            if plugin.get('name') == 'compress':
                args = plugin.get('args', {})
                method = args.get('method', 'gzip')
                if method == 'gzip':
                    return '.gz'
                elif method == 'lzma':
                    return '.xz'
                raise OsbsValidationException("unknown compression method '%s'"
                                              % method)

        return None
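
The dump_resource / restore_resource pair shown earlier in this example makes a simple backup-and-restore round trip possible. A brief hedged sketch, assuming osbs is an already constructed OSBS instance and using the "buildconfigs" resource type handled by _prepare_resource:

# Export all build configs, then re-create them later, logging and counting
# individual failures instead of aborting on the first one.
backup = osbs.dump_resource("buildconfigs")
osbs.restore_resource("buildconfigs", backup, continue_on_error=True)
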
Example #13
def openshift(request):
    os_inst = Openshift(OAPI_PREFIX, API_VER, "/oauth/authorize",
                        k8s_api_url=API_PREFIX)
    os_inst._con = Connection(request.param)
    return os_inst
    def run(self):
        try:
            build_json = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. Probably not running in build container.")
            return
        try:
            build_id = build_json["metadata"]["name"]
        except KeyError:
            self.log.error("malformed build json")
            return
        self.log.info("build id = %s", build_id)

        api_url = urljoin(self.url, "/osapi/v1beta1/")
        oauth_url = urljoin(self.url, "/oauth/authorize")  # MUST NOT END WITH SLASH

        # initial setup will use host based auth: apache will be set to accept everything
        # from specific IP and will set specific X-Remote-User for such requests
        # FIXME: use OSBS here
        o = Openshift(api_url, oauth_url, None, use_auth=self.use_auth, verify_ssl=self.verify_ssl)

        # primary repositories are usually formed from the NVR labels;
        # these should be used for pulling and layering
        primary_repositories = []
        for registry in self.workflow.push_conf.all_registries:
            for image in self.workflow.tag_conf.images:
                registry_image = image.copy()
                registry_image.registry = registry.uri
                primary_repositories.append(registry_image.to_str())

        # unique unpredictable repositories
        unique_repositories = []
        target_image = self.workflow.builder.image.copy()
        for registry in self.workflow.push_conf.all_registries:
            target_image.registry = registry.uri
            unique_repositories.append(target_image.to_str())

        repositories = {
            "primary": primary_repositories,
            "unique": unique_repositories,
        }

        try:
            commit_id = self.workflow.source.lg.commit_id
        except AttributeError:
            commit_id = ""

        labels = {
            "dockerfile": self.workflow.prebuild_results.get(CpDockerfilePlugin.key, ""),
            "artefacts": self.workflow.prebuild_results.get(DistgitFetchArtefactsPlugin.key, ""),
            "logs": "\n".join(self.workflow.build_logs),
            "rpm-packages": "\n".join(self.workflow.postbuild_results.get(PostBuildRPMqaPlugin.key, "")),
            "repositories": json.dumps(repositories),
            "commit_id": commit_id,
        }

        tar_path = self.workflow.exported_squashed_image.get("path")
        tar_size = self.workflow.exported_squashed_image.get("size")
        tar_md5sum = self.workflow.exported_squashed_image.get("md5sum")
        tar_sha256sum = self.workflow.exported_squashed_image.get("sha256sum")
        # it looks like OpenShift can't handle a value of None (null in JSON)
        if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
                tar_path is not None:
            labels["tar_metadata"] = json.dumps({
                "size": tar_size,
                "md5sum": tar_md5sum,
                "sha256sum": tar_sha256sum,
                "filename": os.path.basename(tar_path),
            })
        o.set_annotations_on_build(build_id, labels)
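
For orientation, the dict the run() method above passes to set_annotations_on_build has roughly the shape sketched below. Every concrete value is an invented placeholder; tar_metadata is only set when all four exported-image fields are present, since the comment above notes that OpenShift appears to reject null values.

import json

# Illustrative payload passed to set_annotations_on_build (placeholder values).
labels = {
    "dockerfile": "FROM fedora\nRUN dnf -y update",
    "artefacts": "",
    "logs": "step 1\nstep 2",
    "rpm-packages": "bash-4.3.42-1.fc23.x86_64",
    "repositories": json.dumps({
        "primary": ["registry.example.com/fedora:23"],
        "unique": ["registry.example.com/fedora:unique-build-id"],
    }),
    "commit_id": "0123abcd",
    "tar_metadata": json.dumps({
        "size": 123456789,
        "md5sum": "d41d8cd98f00b204e9800998ecf8427e",
        "sha256sum": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
        "filename": "image.tar",
    }),
}
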
Example #15
class OSBS(object):
    """
    Note: all API methods return an osbs.http.Response object. For historical
    reasons this is untrue for list_builds and get_user, which return a list of
    BuildResponse objects and a dict, respectively.
    """

    _GIT_LABEL_KEYS = ('git-repo-name', 'git-branch')

    @osbsapi
    def __init__(self, openshift_configuration, build_configuration):
        """ """
        self.os_conf = openshift_configuration
        self.build_conf = build_configuration
        self.os = Openshift(openshift_api_url=self.os_conf.get_openshift_api_uri(),
                            openshift_api_version=self.os_conf.get_openshift_api_version(),
                            openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
                            k8s_api_url=self.os_conf.get_k8s_api_uri(),
                            verbose=self.os_conf.get_verbosity(),
                            username=self.os_conf.get_username(),
                            password=self.os_conf.get_password(),
                            use_kerberos=self.os_conf.get_use_kerberos(),
                            client_cert=self.os_conf.get_client_cert(),
                            client_key=self.os_conf.get_client_key(),
                            kerberos_keytab=self.os_conf.get_kerberos_keytab(),
                            kerberos_principal=self.os_conf.get_kerberos_principal(),
                            kerberos_ccache=self.os_conf.get_kerberos_ccache(),
                            use_auth=self.os_conf.get_use_auth(),
                            verify_ssl=self.os_conf.get_verify_ssl(),
                            token=self.os_conf.get_oauth2_token(),
                            namespace=self.os_conf.get_namespace())
        self._bm = None

    @osbsapi
    def list_builds(self, field_selector=None, koji_task_id=None, running=None,
                    labels=None):
        """
        List builds with matching fields

        :param field_selector: str, field selector for Builds
        :param koji_task_id: str, only list builds for Koji Task ID
        :return: BuildResponse list
        """

        if running:
            running_fs = ",".join(["status!={status}".format(status=status.capitalize())
                                  for status in BUILD_FINISHED_STATES])
            if not field_selector:
                field_selector = running_fs
            else:
                field_selector = ','.join([field_selector, running_fs])
        response = self.os.list_builds(field_selector=field_selector,
                                       koji_task_id=koji_task_id, labels=labels)
        serialized_response = response.json()
        build_list = []
        for build in serialized_response["items"]:
            build_list.append(BuildResponse(build))

        return build_list

    def watch_builds(self, field_selector=None):
        kwargs = {}
        if field_selector is not None:
            kwargs['fieldSelector'] = field_selector

        for changetype, obj in self.os.watch_resource("builds", **kwargs):
            yield changetype, obj

    @osbsapi
    def get_build(self, build_id):
        response = self.os.get_build(build_id)
        build_response = BuildResponse(response.json())
        return build_response

    @osbsapi
    def cancel_build(self, build_id):
        response = self.os.cancel_build(build_id)
        build_response = BuildResponse(response.json())
        return build_response

    @osbsapi
    def get_pod_for_build(self, build_id):
        """
        :return: PodResponse object for pod relating to the build
        """
        pods = self.os.list_pods(label='openshift.io/build.name=%s' % build_id)
        serialized_response = pods.json()
        pod_list = [PodResponse(pod) for pod in serialized_response["items"]]
        if not pod_list:
            raise OsbsException("No pod for build")
        elif len(pod_list) != 1:
            raise OsbsException("Only one pod expected but %d returned",
                                len(pod_list))
        return pod_list[0]

    @osbsapi
    def get_build_request(self, build_type=None, inner_template=None,
                          outer_template=None, customize_conf=None):
        """
        return instance of BuildRequest

        :param build_type: str, unused
        :param inner_template: str, name of inner template for BuildRequest
        :param outer_template: str, name of outer template for BuildRequest
        :param customize_conf: str, name of customization config for BuildRequest
        :return: instance of BuildRequest
        """
        if build_type is not None:
            warnings.warn("build types are deprecated, do not use the build_type argument")

        build_request = BuildRequest(
            build_json_store=self.os_conf.get_build_json_store(),
            inner_template=inner_template,
            outer_template=outer_template,
            customize_conf=customize_conf)

        # Apply configured resource limits.
        cpu_limit = self.build_conf.get_cpu_limit()
        memory_limit = self.build_conf.get_memory_limit()
        storage_limit = self.build_conf.get_storage_limit()
        if (cpu_limit is not None or
                memory_limit is not None or
                storage_limit is not None):
            build_request.set_resource_limits(cpu=cpu_limit,
                                              memory=memory_limit,
                                              storage=storage_limit)

        return build_request

    @osbsapi
    def create_build_from_buildrequest(self, build_request):
        """
        render provided build_request and submit build from it

        :param build_request: instance of build.build_request.BuildRequest
        :return: instance of build.build_response.BuildResponse
        """
        build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
        build = build_request.render()
        response = self.os.create_build(json.dumps(build))
        build_response = BuildResponse(response.json())
        return build_response

    def _get_running_builds_for_build_config(self, build_config_id):
        all_builds_for_bc = self.os.list_builds(build_config_id=build_config_id).json()['items']
        running = []
        for b in all_builds_for_bc:
            br = BuildResponse(b)
            if br.is_pending() or br.is_running():
                running.append(br)
        return running

    def _panic_msg_for_more_running_builds(self, build_config_name, builds):
        # this should never happen, but if it does, we want to know all the builds
        #  that were running at the time
        builds = ', '.join(['%s: %s' % (b.get_build_name(), b.status) for b in builds])
        msg = "Multiple builds for %s running, can't proceed: %s" % \
            (build_config_name, builds)
        return msg

    def _verify_labels_match(self, new_build_config, existing_build_config):
        new_labels = new_build_config['metadata']['labels']
        existing_labels = existing_build_config['metadata']['labels']

        for key in self._GIT_LABEL_KEYS:
            new_label_value = new_labels.get(key)
            existing_label_value = existing_labels.get(key)

            if (existing_label_value and existing_label_value != new_label_value):
                msg = (
                    'Git labels collide with existing build config "%s". '
                    'Existing labels: %r, '
                    'New labels: %r ') % (
                       existing_build_config['metadata']['name'],
                       existing_labels,
                       new_labels)
                raise OsbsValidationException(msg)

    def _get_existing_build_config(self, build_config):
        """
        Uses the given build config to find an existing matching build config.
        Build configs are a match if:
        - metadata.name are equal
        OR
        - metadata.labels.git-repo-name AND metadata.labels.git-branch are equal
        """

        git_labels = [(key, build_config['metadata']['labels'][key])
                      for key in self._GIT_LABEL_KEYS]
        name = build_config['metadata']['name']

        queries = (
            (self.os.get_build_config_by_labels, git_labels),
            (self.os.get_build_config, name),
        )

        existing_bc = None
        for func, arg in queries:
            try:
                existing_bc = func(arg)
                # build config found
                break
            except OsbsException as exc:
                # doesn't exist
                logger.info('Build config NOT found via %s: %s',
                            func.__name__, str(exc))
                continue

        return existing_bc
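
The docstring above describes the matching rule: equal names, or equal git-repo-name and git-branch labels. A hedged illustration with made-up names and labels; the point is that the two configs below are treated as the same logical build config even though their names differ, and _create_build_config_and_build further down keeps using the existing name.

# Two build configs that _get_existing_build_config would match via labels.
new_bc = {"metadata": {"name": "fedora-23-candidate",
                       "labels": {"git-repo-name": "fedora-docker",
                                  "git-branch": "master"}}}
existing_bc = {"metadata": {"name": "fedora-docker-master",
                            "labels": {"git-repo-name": "fedora-docker",
                                       "git-branch": "master"}}}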

    def _verify_no_running_builds(self, build_config_name):
        running_builds = self._get_running_builds_for_build_config(build_config_name)
        rb_len = len(running_builds)

        if rb_len > 0:
            if rb_len == 1:
                rb = running_builds[0]
                msg = "Build %s for %s in state %s, can't proceed." % \
                    (rb.get_build_name(), build_config_name, rb.status)
            else:
                msg = self._panic_msg_for_more_running_builds(build_config_name, running_builds)
            raise OsbsException(msg)

    def _create_scratch_build(self, build_request):
        return self._create_build_directly(build_request)

    def _create_isolated_build(self, build_request):
        return self._create_build_directly(build_request,
                                           unique=('git-repo-name', 'git-branch', 'isolated'))

    def _create_build_directly(self, build_request, unique=None):
        logger.debug(build_request)
        build_json = build_request.render()
        build_json['kind'] = 'Build'
        build_json['spec']['serviceAccount'] = 'builder'

        builder_img = build_json['spec']['strategy']['customStrategy']['from']
        kind = builder_img['kind']
        if kind == 'ImageStreamTag':
            # Only BuildConfigs get to specify an ImageStreamTag. When
            # creating Builds directly we need to specify a
            # DockerImage.
            response = self.get_image_stream_tag(builder_img['name'])
            ref = response.json()['image']['dockerImageReference']
            builder_img['kind'] = 'DockerImage'
            builder_img['name'] = ref

        if unique:
            unique_labels = {}
            for u in unique:
                unique_labels[u] = build_json['metadata']['labels'][u]
            running_builds = self.list_builds(running=True, labels=unique_labels)
            if running_builds:
                raise RuntimeError('Matching build(s) already running: {0}'
                                   .format(', '.join(x.get_build_name() for x in running_builds)))

        return BuildResponse(self.os.create_build(build_json).json())
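
The comment inside the method above explains why Builds created directly cannot reference an ImageStreamTag. A small hedged illustration of the "from" rewrite, with placeholder image names:

# Before: the builder image as a BuildConfig would reference it.
builder_from_bc = {"kind": "ImageStreamTag", "name": "buildroot:latest"}
# After: the concrete DockerImage reference the tag currently resolves to,
# as required when creating a Build object directly.
builder_from_build = {"kind": "DockerImage",
                      "name": "registry.example.com/buildroot@sha256:0123456789abcdef"}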

    def _get_image_stream_info_for_build_request(self, build_request):
        """Return ImageStream, and ImageStreamTag name for base_image of build_request

        If build_request is not auto instantiated, objects are not fetched
        and None, None is returned.
        """
        image_stream = None
        image_stream_tag_name = None

        if build_request.has_ist_trigger():
            image_stream_tag_id = build_request.spec.trigger_imagestreamtag.value
            image_stream_id, image_stream_tag_name = image_stream_tag_id.split(':')

            try:
                image_stream = self.get_image_stream(image_stream_id).json()
            except OsbsResponseException as x:
                if x.status_code != 404:
                    raise

            if image_stream:
                try:
                    self.get_image_stream_tag(image_stream_tag_id).json()
                except OsbsResponseException as x:
                    if x.status_code != 404:
                        raise

        return image_stream, image_stream_tag_name

    def _create_build_config_and_build(self, build_request):
        build_json = build_request.render()
        api_version = build_json['apiVersion']
        if api_version != self.os_conf.get_openshift_api_version():
            raise OsbsValidationException('BuildConfig template has incorrect apiVersion (%s)' %
                                          api_version)

        build_config_name = build_json['metadata']['name']
        logger.debug('build config to be named "%s"', build_config_name)
        existing_bc = self._get_existing_build_config(build_json)

        image_stream, image_stream_tag_name = \
            self._get_image_stream_info_for_build_request(build_request)

        # Remove triggers in BuildConfig to avoid accidental
        # auto instance of Build. If defined, triggers will
        # be added to BuildConfig after ImageStreamTag object
        # is properly configured.
        triggers = build_json['spec'].pop('triggers', None)

        if existing_bc:
            self._verify_labels_match(build_json, existing_bc)
            # Existing build config may have a different name if matched by
            # git-repo-name and git-branch labels. Continue using existing
            # build config name.
            build_config_name = existing_bc['metadata']['name']
            logger.debug('existing build config name to be used "%s"',
                         build_config_name)
            self._verify_no_running_builds(build_config_name)

            # Remove nodeSelector, will be set from build_json for worker build
            old_nodeselector = existing_bc['spec'].pop('nodeSelector', None)
            logger.debug("removing build config's nodeSelector %s", old_nodeselector)

            utils.buildconfig_update(existing_bc, build_json)
            # Reset name change that may have occurred during
            # update above, since renaming is not supported.
            existing_bc['metadata']['name'] = build_config_name
            logger.debug('build config for %s already exists, updating...',
                         build_config_name)

            self.os.update_build_config(build_config_name, json.dumps(existing_bc))
            if triggers:
                # Retrieve updated version to pick up lastVersion
                existing_bc = self._get_existing_build_config(existing_bc)

        else:
            logger.debug("build config for %s doesn't exist, creating...",
                         build_config_name)
            existing_bc = self.os.create_build_config(json.dumps(build_json)).json()

        if image_stream:
            changed_ist = self.ensure_image_stream_tag(image_stream,
                                                       image_stream_tag_name,
                                                       scheduled=True)
            logger.debug('Changed parent ImageStreamTag? %s', changed_ist)

        if triggers:
            existing_bc['spec']['triggers'] = triggers
            self.os.update_build_config(build_config_name, json.dumps(existing_bc))

        if image_stream and triggers:
            prev_version = existing_bc['status']['lastVersion']
            build_id = self.os.wait_for_new_build_config_instance(
                build_config_name, prev_version)
            build = BuildResponse(self.os.get_build(build_id).json())
        else:
            response = self.os.start_build(build_config_name)
            build = BuildResponse(response.json())

        return build

    def _do_create_prod_build(self, git_uri, git_ref,
                              git_branch,
                              user,
                              component=None,
                              target=None,
                              architecture=None, yum_repourls=None,
                              koji_task_id=None,
                              scratch=None,
                              platform=None,
                              platforms=None,
                              build_type=None,
                              release=None,
                              inner_template=None,
                              outer_template=None,
                              customize_conf=None,
                              arrangement_version=None,
                              filesystem_koji_task_id=None,
                              koji_upload_dir=None,
                              is_auto=False,
                              koji_parent_build=None,
                              isolated=None,
                              **kwargs):
        repo_info = utils.get_repo_info(git_uri, git_ref, git_branch=git_branch)
        df_parser = repo_info.dockerfile_parser
        build_request = self.get_build_request(inner_template=inner_template,
                                               outer_template=outer_template,
                                               customize_conf=customize_conf)
        labels = utils.Labels(df_parser.labels)

        required_missing = False
        req_labels = {}
        # The version label isn't used here, but it is a required label in the
        # Dockerfile and atomic-reactor needs it; if we don't catch the error
        # here, the build will fail later in atomic-reactor.
        for label in [utils.Labels.LABEL_TYPE_NAME,
                      utils.Labels.LABEL_TYPE_COMPONENT,
                      utils.Labels.LABEL_TYPE_VERSION]:
            try:
                _, req_labels[label] = labels.get_name_and_value(label)
            except KeyError:
                required_missing = True
                logger.error("required label missing from Dockerfile : %s",
                             labels.get_name(label))

        if not git_branch:
            raise OsbsValidationException("required argument 'git_branch' can't be None")

        if required_missing:
            raise OsbsValidationException("required label missing from Dockerfile")

        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            git_branch=git_branch,
            user=user,
            component=req_labels[utils.Labels.LABEL_TYPE_COMPONENT],
            build_image=self.build_conf.get_build_image(),
            build_imagestream=self.build_conf.get_build_imagestream(),
            base_image=df_parser.baseimage,
            name_label=req_labels[utils.Labels.LABEL_TYPE_NAME],
            registry_uris=self.build_conf.get_registry_uris(),
            registry_secrets=self.build_conf.get_registry_secrets(),
            source_registry_uri=self.build_conf.get_source_registry_uri(),
            registry_api_versions=self.build_conf.get_registry_api_versions(platform),
            openshift_uri=self.os_conf.get_openshift_base_uri(),
            builder_openshift_url=self.os_conf.get_builder_openshift_url(),
            kojiroot=self.build_conf.get_kojiroot(),
            kojihub=self.build_conf.get_kojihub(),
            sources_command=self.build_conf.get_sources_command(),
            koji_target=target,
            koji_certs_secret=self.build_conf.get_koji_certs_secret(),
            koji_task_id=koji_task_id,
            koji_use_kerberos=self.build_conf.get_koji_use_kerberos(),
            koji_kerberos_keytab=self.build_conf.get_koji_kerberos_keytab(),
            koji_kerberos_principal=self.build_conf.get_koji_kerberos_principal(),
            architecture=architecture,
            platforms=platforms,
            platform=platform,
            build_type=build_type,
            release=release,
            vendor=self.build_conf.get_vendor(),
            build_host=self.build_conf.get_build_host(),
            authoritative_registry=self.build_conf.get_authoritative_registry(),
            distribution_scope=self.build_conf.get_distribution_scope(),
            yum_repourls=yum_repourls,
            proxy=self.build_conf.get_proxy(),
            pulp_secret=self.build_conf.get_pulp_secret(),
            smtp_host=self.build_conf.get_smtp_host(),
            smtp_from=self.build_conf.get_smtp_from(),
            smtp_additional_addresses=self.build_conf.get_smtp_additional_addresses(),
            smtp_error_addresses=self.build_conf.get_smtp_error_addresses(),
            smtp_email_domain=self.build_conf.get_smtp_email_domain(),
            smtp_to_submitter=self.build_conf.get_smtp_to_submitter(),
            smtp_to_pkgowner=self.build_conf.get_smtp_to_pkgowner(),
            use_auth=self.build_conf.get_builder_use_auth(),
            pulp_registry=self.os_conf.get_pulp_registry(),
            nfs_server_path=self.os_conf.get_nfs_server_path(),
            nfs_dest_dir=self.build_conf.get_nfs_destination_dir(),
            builder_build_json_dir=self.build_conf.get_builder_build_json_store(),
            scratch=self.build_conf.get_scratch(scratch),
            reactor_config_secret=self.build_conf.get_reactor_config_secret(),
            client_config_secret=self.build_conf.get_client_config_secret(),
            token_secrets=self.build_conf.get_token_secrets(),
            arrangement_version=arrangement_version,
            info_url_format=self.build_conf.get_info_url_format(),
            artifacts_allowed_domains=self.build_conf.get_artifacts_allowed_domains(),
            equal_labels=self.build_conf.get_equal_labels(),
            platform_node_selector=self.build_conf.get_platform_node_selector(platform),
            scratch_build_node_selector=self.build_conf.get_scratch_build_node_selector(),
            explicit_build_node_selector=self.build_conf.get_explicit_build_node_selector(),
            auto_build_node_selector=self.build_conf.get_auto_build_node_selector(),
            is_auto=is_auto,
            filesystem_koji_task_id=filesystem_koji_task_id,
            koji_upload_dir=koji_upload_dir,
            platform_descriptors=self.build_conf.get_platform_descriptors(),
            koji_parent_build=koji_parent_build,
            group_manifests=self.os_conf.get_group_manifests(),
            isolated=isolated,
            prefer_schema1_digest=self.build_conf.get_prefer_schema1_digest(),
        )
        build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
        build_request.set_repo_info(repo_info)
        if build_request.scratch:
            response = self._create_scratch_build(build_request)
        elif build_request.isolated:
            response = self._create_isolated_build(build_request)
        else:
            response = self._create_build_config_and_build(build_request)
        logger.debug(response.json)
        return response

    @osbsapi
    def create_prod_build(self, *args, **kwargs):
        """
        Create a production build

        :param git_uri: str, URI of git repository
        :param git_ref: str, reference to commit
        :param git_branch: str, branch name
        :param user: str, user name
        :param component: str, not used anymore
        :param target: str, koji target
        :param architecture: str, build architecture
        :param yum_repourls: list, URLs for yum repos
        :param koji_task_id: int, koji task ID requesting build
        :param scratch: bool, this is a scratch build
        :param platform: str, the platform name
        :param platforms: list<str>, the name of each platform
        :param release: str, the release value to use
        :param inner_template: str, name of inner template for BuildRequest
        :param outer_template: str, name of outer template for BuildRequest
        :param customize_conf: str, name of customization config for BuildRequest
        :param arrangement_version: int, numbered arrangement of plugins for orchestration workflow
        :return: BuildResponse instance
        """
        warnings.warn("prod (all-in-one) builds are deprecated, "
                      "please use create_orchestrator_build")
        return self._do_create_prod_build(*args, **kwargs)

    @osbsapi
    def create_prod_with_secret_build(self, git_uri, git_ref, git_branch, user, component=None,
                                      target=None, architecture=None, yum_repourls=None, **kwargs):
        warnings.warn("create_prod_with_secret_build is deprecated, please use create_build")
        return self._do_create_prod_build(git_uri, git_ref, git_branch, user,
                                          component, target, architecture,
                                          yum_repourls=yum_repourls, **kwargs)

    @osbsapi
    def create_prod_without_koji_build(self, git_uri, git_ref, git_branch, user, component=None,
                                       architecture=None, yum_repourls=None, **kwargs):
        warnings.warn("create_prod_without_koji_build is deprecated, please use create_build")
        return self._do_create_prod_build(git_uri, git_ref, git_branch, user,
                                          component, None, architecture,
                                          yum_repourls=yum_repourls, **kwargs)

    @osbsapi
    def create_simple_build(self, **kwargs):
        warnings.warn("simple builds are deprecated, please use the create_build method")
        return self._do_create_prod_build(**kwargs)

    @osbsapi
    def create_build(self, **kwargs):
        """
        take input args, create build request and submit the build

        :param kwargs: keyword args for build
        :return: instance of BuildRequest
        """
        return self._do_create_prod_build(**kwargs)
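
# A minimal usage sketch for create_build(), assuming osbs-client is installed and a valid
# configuration file exists (the git URL and user below are placeholders, and constructing
# Configuration() with no arguments relies on its default config location).
from osbs.api import OSBS
from osbs.conf import Configuration

config = Configuration()
osbs = OSBS(config, config)

build = osbs.create_build(git_uri='https://example.com/containers/foo.git',
                          git_ref='master',
                          git_branch='master',
                          user='jdoe')
print(build.get_build_name())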

    @osbsapi
    def create_worker_build(self, **kwargs):
        """
        Create a worker build

        Pass through method to create_prod_build with the following
        modifications:
            - platform param is required
            - release param is required
            - arrangement_version param is required, which is used to
              select which worker_inner:n.json template to use
            - inner template set to worker_inner:n.json if not set
            - outer template set to worker.json if not set
            - customize configuration set to worker_customize.json if not set

        :return: BuildResponse instance
        """
        missing = set()
        for required in ('platform', 'release', 'arrangement_version'):
            if not kwargs.get(required):
                missing.add(required)

        if missing:
            raise ValueError("Worker build missing required parameters: %s" %
                             missing)

        if kwargs.get('platforms'):
            raise ValueError("Worker build called with unwanted platforms param")

        arrangement_version = kwargs['arrangement_version']
        kwargs.setdefault('inner_template', WORKER_INNER_TEMPLATE.format(
            arrangement_version=arrangement_version))
        kwargs.setdefault('outer_template', WORKER_OUTER_TEMPLATE)
        kwargs.setdefault('customize_conf', WORKER_CUSTOMIZE_CONF)
        kwargs['build_type'] = BUILD_TYPE_WORKER
        try:
            return self._do_create_prod_build(**kwargs)
        except IOError as ex:
            if os.path.basename(ex.filename) == kwargs['inner_template']:
                raise OsbsValidationException("worker invalid arrangement_version %s" %
                                              arrangement_version)

            raise
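
# A hedged sketch of a worker build call. platform, release and arrangement_version are the
# parameters the method above insists on; arrangement_version selects the
# worker_inner:<n>.json template. `osbs` is assumed to be an OSBS instance constructed as in
# the create_build() sketch earlier, and all values are placeholders.
worker_build = osbs.create_worker_build(git_uri='https://example.com/containers/foo.git',
                                        git_ref='master',
                                        git_branch='master',
                                        user='jdoe',
                                        platform='x86_64',
                                        release='1',
                                        arrangement_version=3)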

    @osbsapi
    def create_orchestrator_build(self, **kwargs):
        """
        Create an orchestrator build

        Pass through method to create_prod_build with the following
        modifications:
            - platforms param is required
            - arrangement_version param may be used to select which
              orchestrator_inner:n.json template to use
            - inner template set to orchestrator_inner:n.json if not set
            - outer template set to orchestrator.json if not set
            - customize configuration set to orchestrator_customize.json if not set

        :return: BuildResponse instance
        """
        if not kwargs.get('platforms'):
            raise ValueError('Orchestrator build requires platforms param')

        if not self.can_orchestrate():
            raise OsbsOrchestratorNotEnabled("can't create orchestrator build "
                                             "when can_orchestrate isn't enabled")
        extra = [x for x in ('platform',) if kwargs.get(x)]
        if extra:
            raise ValueError("Orchestrator build called with unwanted parameters: %s" %
                             extra)

        arrangement_version = kwargs.setdefault('arrangement_version',
                                                self.build_conf.get_arrangement_version())

        kwargs.setdefault('inner_template', ORCHESTRATOR_INNER_TEMPLATE.format(
            arrangement_version=arrangement_version))
        kwargs.setdefault('outer_template', ORCHESTRATOR_OUTER_TEMPLATE)
        kwargs.setdefault('customize_conf', ORCHESTRATOR_CUSTOMIZE_CONF)
        kwargs['build_type'] = BUILD_TYPE_ORCHESTRATOR
        try:
            return self._do_create_prod_build(**kwargs)
        except IOError as ex:
            if os.path.basename(ex.filename) == kwargs['inner_template']:
                raise OsbsValidationException("orchestrator invalid arrangement_version %s" %
                                              arrangement_version)

            raise
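
# Orchestrator counterpart of the worker sketch above: platforms (a list) is required, a
# single-platform `platform` argument must not be passed, and arrangement_version falls back
# to the configured default when omitted. `osbs` is again an assumed, pre-configured OSBS
# instance and the git values are placeholders.
orch_build = osbs.create_orchestrator_build(git_uri='https://example.com/containers/foo.git',
                                            git_ref='master',
                                            git_branch='master',
                                            user='jdoe',
                                            platforms=['x86_64', 'ppc64le'])
print(orch_build.get_build_name())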

    def _decode_build_logs_generator(self, logs):
        for line in logs:
            line = line.decode("utf-8").rstrip()
            yield line

    @osbsapi
    def get_build_logs(self, build_id, follow=False, build_json=None, wait_if_missing=False,
                       decode=False):
        """
        provide logs from build

        NOTE: Since atomic-reactor 1.6.25, logs are always in UTF-8, so if
        asked to decode, we assume that is the encoding in use. Otherwise, we
        return the bytes exactly as they came from the container.

        :param build_id: str
        :param follow: bool, fetch logs as they come?
        :param build_json: dict, to save one get-build query
        :param wait_if_missing: bool, if build doesn't exist, wait
        :param decode: bool, whether or not to decode logs as utf-8
        :return: None, bytes, or iterable of bytes
        """
        logs = self.os.logs(build_id, follow=follow, build_json=build_json,
                            wait_if_missing=wait_if_missing)

        if decode and isinstance(logs, GeneratorType):
            return self._decode_build_logs_generator(logs)

        # str or None returned from self.os.logs()
        if decode and logs is not None:
            logs = logs.decode("utf-8").rstrip()

        return logs
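
# Follow-and-decode sketch for get_build_logs(): with follow=True and decode=True the call
# yields UTF-8 decoded lines as they arrive; without decode it returns bytes (or None).
# `osbs` and `build_id` are assumed to exist already.
for line in osbs.get_build_logs(build_id, follow=True, decode=True):
    print(line)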

    @staticmethod
    def _parse_build_log_entry(entry):
        items = entry.split()
        if len(items) < 4:
            # This is not a valid build log entry
            return (None, entry)

        platform = items[2]
        if not platform.startswith("platform:"):
            # Line logged without using the appropriate LoggerAdapter
            return (None, entry)

        platform = platform.split(":", 1)[1]
        if platform == "-":
            return (None, entry)  # proper orchestrator build log entry

        # Anything else should be a worker build log entry, so we strip off
        # the leading 8 wrapping orchestrator log fields:
        # <date> <time> <platform> - <name> - <level> -
        plen = sum(len(items[i]) + 1  # include trailing space
                   for i in range(8))
        line = entry[plen:]
        # if the 3rd field is "platform:-", we strip it out
        items = line.split()
        if len(items) > 2 and items[2] == "platform:-":
            plen = sum(len(items[i]) + 1  # include trailing space
                       for i in range(3))
            line = "%s %s %s" % (items[0], items[1], line[plen:])
        return (platform, line)
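
# Worked example for the parser above, using a made-up log line in the
# "<date> <time> platform:<arch> - <name> - <level> - <message>" shape it expects
# (single spaces between the eight wrapping fields). OSBS here is the client class these
# methods belong to; the entries are illustrations, not captured build output.
entry = ('2017-06-23 17:18:41,791 platform:x86_64 - '
         'atomic_reactor.plugin - INFO - hello from the worker')
assert OSBS._parse_build_log_entry(entry) == ('x86_64', 'hello from the worker')

# The orchestrator's own lines carry "platform:-" and are passed through untouched:
orch_entry = '2017-06-23 17:18:41,791 platform:- - osbs - INFO - hi'
assert OSBS._parse_build_log_entry(orch_entry) == (None, orch_entry)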

    @osbsapi
    def get_orchestrator_build_logs(self, build_id, follow=False, wait_if_missing=False):
        """
        provide logs from orchestrator build

        :param build_id: str
        :param follow: bool, fetch logs as they come?
        :param wait_if_missing: bool, if build doesn't exist, wait
        :return: generator yielding objects with attributes 'platform' and 'line'
        """
        logs = self.get_build_logs(build_id=build_id, follow=follow,
                                   wait_if_missing=wait_if_missing, decode=True)

        if isinstance(logs, GeneratorType):
            for entries in logs:
                for entry in entries.splitlines():
                    yield LogEntry(*self._parse_build_log_entry(entry))
        else:
            for entry in logs.splitlines():
                yield LogEntry(*self._parse_build_log_entry(entry))
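
# Consumer sketch: each yielded LogEntry has .platform (None for the orchestrator's own
# lines) and .line, so the multiplexed stream can be split out per platform. `osbs` and
# `build_id` are assumed to exist.
for log_entry in osbs.get_orchestrator_build_logs(build_id, follow=True):
    prefix = log_entry.platform or 'orchestrator'
    print('%s: %s' % (prefix, log_entry.line))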

    @osbsapi
    def get_docker_build_logs(self, build_id, decode_logs=True, build_json=None):
        """
        get logs provided by "docker build"

        :param build_id: str
        :param decode_logs: bool, docker by default output logs in simple json structure:
            { "stream": "line" }
            if this arg is set to True, it decodes logs to human readable form
        :param build_json: dict, to save one get-build query
        :return: str
        """
        if not build_json:
            build = self.os.get_build(build_id)
            build_response = BuildResponse(build.json())
        else:
            build_response = BuildResponse(build_json)

        if build_response.is_finished():
            logs = build_response.get_logs(decode_logs=decode_logs)
            return logs
        logger.warning("build hasn't finished yet")
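
# Sketch for get_docker_build_logs(): only finished builds have their docker-build output
# available, so wait first. `osbs` and `build_id` are assumed to exist.
osbs.wait_for_build_to_finish(build_id)
print(osbs.get_docker_build_logs(build_id))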

    @osbsapi
    def wait_for_build_to_finish(self, build_id):
        response = self.os.wait_for_build_to_finish(build_id)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def wait_for_build_to_get_scheduled(self, build_id):
        response = self.os.wait_for_build_to_get_scheduled(build_id)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def update_labels_on_build(self, build_id, labels):
        response = self.os.update_labels_on_build(build_id, labels)
        return response

    @osbsapi
    def set_labels_on_build(self, build_id, labels):
        response = self.os.set_labels_on_build(build_id, labels)
        return response

    @osbsapi
    def update_labels_on_build_config(self, build_config_id, labels):
        response = self.os.update_labels_on_build_config(build_config_id, labels)
        return response

    @osbsapi
    def set_labels_on_build_config(self, build_config_id, labels):
        response = self.os.set_labels_on_build_config(build_config_id, labels)
        return response

    @osbsapi
    def update_annotations_on_build(self, build_id, annotations):
        return self.os.update_annotations_on_build(build_id, annotations)

    @osbsapi
    def set_annotations_on_build(self, build_id, annotations):
        return self.os.set_annotations_on_build(build_id, annotations)

    @osbsapi
    def import_image(self, name):
        """
        Import image tags from a Docker registry into an ImageStream

        :return: bool, whether new tags were imported
        """

        return self.os.import_image(name)

    @osbsapi
    def get_token(self):
        if self.os.use_kerberos:
            return self.os.get_oauth_token()
        else:
            if self.os.token:
                return self.os.token

            raise OsbsValidationException("no token stored for %s" % self.os_conf.conf_section)

    @osbsapi
    def login(self, token=None, username=None, password=None):
        if self.os.use_kerberos:
            raise OsbsValidationException("can't use login when using kerberos")

        if not token:
            if username:
                self.os.username = username
            else:
                try:
                    self.os.username = raw_input("Username: ")
                except NameError:
                    # Python 3 has no raw_input()
                    self.os.username = input("Username: ")

            if password:
                self.os.password = password
            else:
                self.os.password = getpass.getpass()

            token = self.os.get_oauth_token()

        self.os.token = token
        try:
            self.os.get_user()
        except OsbsResponseException as ex:
            if ex.status_code == 401:  # Unauthorized
                raise OsbsValidationException("token is not valid")
            raise

        token_file = utils.get_instance_token_file_name(self.os_conf.conf_section)
        token_file_dir = os.path.dirname(token_file)

        if not os.path.exists(token_file_dir):
            os.makedirs(token_file_dir)

        # Inspired by http://stackoverflow.com/a/15015748/5998718
        # For security, remove file with potentially elevated mode
        if os.path.exists(token_file):
            os.remove(token_file)

        # Open file descriptor
        fdesc = os.open(token_file,
                        os.O_WRONLY | os.O_CREAT | os.O_EXCL,
                        stat.S_IRUSR | stat.S_IWUSR)

        with os.fdopen(fdesc, 'w') as f:
            f.write(token + '\n')

    @osbsapi
    def get_user(self, username="~"):
        return self.os.get_user(username).json()

    @osbsapi
    def get_serviceaccount_tokens(self, username="~"):
        return self.os.get_serviceaccount_tokens(username)

    @osbsapi
    def get_image_stream_tag(self, tag_id):
        return self.os.get_image_stream_tag(tag_id)

    @osbsapi
    def ensure_image_stream_tag(self, stream, tag_name, scheduled=False):
        """Ensures the tag is monitored in ImageStream

        :param stream: dict, ImageStream object
        :param tag_name: str, name of tag to check, without name of
                              ImageStream as prefix
        :param scheduled: bool, if True, importPolicy.scheduled will be
                                set to True in ImageStreamTag
        :return: bool, whether or not modifications were performed
        """
        img_stream_tag_file = os.path.join(self.os_conf.get_build_json_store(),
                                           'image_stream_tag.json')
        tag_template = json.load(open(img_stream_tag_file))
        return self.os.ensure_image_stream_tag(stream, tag_name, tag_template,
                                               scheduled)
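
# Sketch for ensure_image_stream_tag(): it takes the ImageStream object itself (a dict), so
# fetch the stream and unwrap the HTTP response first. The stream name 'fedora' and tag
# 'latest' are placeholders.
stream = osbs.get_image_stream('fedora').json()
changed = osbs.ensure_image_stream_tag(stream, 'latest', scheduled=True)
print('ImageStreamTag modified: %s' % changed)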

    @osbsapi
    def get_image_stream(self, stream_id):
        return self.os.get_image_stream(stream_id)

    @osbsapi
    def create_image_stream(self, name, docker_image_repository,
                            insecure_registry=False):
        """
        Create an ImageStream object

        Raises exception on error

        :param name: str, name of ImageStream
        :param docker_image_repository: str, pull spec for docker image
               repository
        :param insecure_registry: bool, whether plain HTTP should be used
        :return: response
        """
        img_stream_file = os.path.join(self.os_conf.get_build_json_store(), 'image_stream.json')
        stream = json.load(open(img_stream_file))
        stream['metadata']['name'] = name
        stream['spec']['dockerImageRepository'] = docker_image_repository
        if insecure_registry:
            stream['metadata'].setdefault('annotations', {})
            insecure_annotation = 'openshift.io/image.insecureRepository'
            stream['metadata']['annotations'][insecure_annotation] = 'true'

        return self.os.create_image_stream(json.dumps(stream))
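
# Usage sketch for create_image_stream(): register an external repository as an ImageStream
# so builds can later be triggered from it. The name and pull spec are placeholders;
# insecure_registry=True only adds the annotation allowing plain-HTTP imports.
response = osbs.create_image_stream('foo',
                                    'registry.example.com/product/foo',
                                    insecure_registry=True)
print(response.json()['metadata']['name'])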

    def _load_quota_json(self, quota_name=None):
        quota_file = os.path.join(self.os_conf.get_build_json_store(),
                                  'pause_quota.json')
        with open(quota_file) as fp:
            quota_json = json.load(fp)

        if quota_name:
            quota_json['metadata']['name'] = quota_name

        return quota_json['metadata']['name'], quota_json

    @osbsapi
    def pause_builds(self, quota_name=None):
        # First, set quota so 0 pods are allowed to be running
        quota_name, quota_json = self._load_quota_json(quota_name)
        self.os.create_resource_quota(quota_name, quota_json)

        # Now wait for running builds to finish
        while True:
            field_selector = ','.join(['status=%s' % status.capitalize()
                                       for status in BUILD_RUNNING_STATES])
            builds = self.list_builds(field_selector)

            # Double check builds are actually in running state.
            running_builds = [build for build in builds if build.is_running()]

            if not running_builds:
                break

            name = running_builds[0].get_build_name()
            logger.info("waiting for build to finish: %s", name)
            self.wait_for_build_to_finish(name)

    @osbsapi
    def resume_builds(self, quota_name=None):
        quota_name, _ = self._load_quota_json(quota_name)
        self.os.delete_resource_quota(quota_name)
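
# Maintenance-window sketch built from the two calls above: pause_builds() creates a resource
# quota that blocks new build pods and waits for running builds to drain; resume_builds()
# deletes that quota again. The quota name is optional and defaults to the one shipped in
# pause_quota.json.
osbs.pause_builds()
try:
    pass  # ... perform maintenance here ...
finally:
    osbs.resume_builds()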

    # implements subset of OpenShift's export logic in pkg/cmd/cli/cmd/exporter.go
    @staticmethod
    def _prepare_resource(resource):
        utils.graceful_chain_del(resource, 'metadata', 'resourceVersion')

    @osbsapi
    def dump_resource(self, resource_type):
        return self.os.dump_resource(resource_type).json()

    @osbsapi
    def restore_resource(self, resource_type, resources, continue_on_error=False):
        nfailed = 0
        for r in resources["items"]:
            name = utils.graceful_chain_get(r, 'metadata', 'name') or '(no name)'
            logger.debug("restoring %s/%s", resource_type, name)
            try:
                self._prepare_resource(r)
                self.os.restore_resource(resource_type, r)
            except Exception:
                if continue_on_error:
                    logger.exception("failed to restore %s/%s", resource_type, name)
                    nfailed += 1
                else:
                    raise

        if continue_on_error:
            ntotal = len(resources["items"])
            logger.info("restored %s/%s %s", ntotal - nfailed, ntotal, resource_type)

    @osbsapi
    def get_compression_extension(self):
        """
        Find the filename extension for the 'docker save' output, which
        may or may not be compressed.

        Raises OsbsValidationException if the extension cannot be
        determined due to a configuration error.

        :returns: str including leading dot, or else None if no compression
        """

        build_request = BuildRequest(build_json_store=self.os_conf.get_build_json_store())
        inner = build_request.inner_template
        postbuild_plugins = inner.get('postbuild_plugins', [])
        for plugin in postbuild_plugins:
            if plugin.get('name') == 'compress':
                args = plugin.get('args', {})
                method = args.get('method', 'gzip')
                if method == 'gzip':
                    return '.gz'
                elif method == 'lzma':
                    return '.xz'
                raise OsbsValidationException("unknown compression method '%s'"
                                              % method)

        return None

    @osbsapi
    def list_resource_quotas(self):
        return self.os.list_resource_quotas().json()

    @osbsapi
    def get_resource_quota(self, quota_name):
        return self.os.get_resource_quota(quota_name).json()

    @osbsapi
    def can_orchestrate(self):
        return self.build_conf.get_can_orchestrate()

    @osbsapi
    def create_config_map(self, name, data):
        """
        Create an ConfigMap object on the server

        Raises exception on error

        :param name: str, name of configMap
        :param data: dict, dictionary of data to be stored
        :returns: ConfigMapResponse containing the ConfigMap with name and data
        """
        config_data_file = os.path.join(self.os_conf.get_build_json_store(), 'config_map.json')
        config_data = json.load(open(config_data_file))
        config_data['metadata']['name'] = name
        data_dict = {}
        for key, value in data.items():
            data_dict[key] = json.dumps(value)
        config_data['data'] = data_dict

        response = self.os.create_config_map(config_data)
        config_map_response = ConfigMapResponse(response.json())
        return config_map_response
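
# Sketch for the ConfigMap helpers: every value is JSON-serialized before being stored, so
# nested structures are fine. The map name and payload are made up for illustration.
cm = osbs.create_config_map('build-metadata', {'koji_task_id': 12345,
                                               'platforms': ['x86_64']})
fetched = osbs.get_config_map('build-metadata')
osbs.delete_config_map('build-metadata')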

    @osbsapi
    def get_config_map(self, name):
        """
        Get a ConfigMap object from the server

        Raises exception on error

        :param name: str, name of configMap to get from the server
        :returns: ConfigMapResponse containing the ConfigMap with the requested name
        """
        response = self.os.get_config_map(name)
        config_map_response = ConfigMapResponse(response.json())
        return config_map_response

    @osbsapi
    def delete_config_map(self, name):
        """
        Delete a ConfigMap object from the server

        Raises exception on error

        :param name: str, name of configMap to delete from the server
        :returns: True on success
        """
        response = self.os.delete_config_map(name)
        return response

    @contextmanager
    def retries_disabled(self):
        """
        Context manager to disable retries on requests
        :returns: None
        """
        self.os.retries_enabled = False
        try:
            yield
        finally:
            # re-enable retries even if the caller's block raised
            self.os.retries_enabled = True
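
# Usage sketch for retries_disabled(): handy around calls where failing fast is preferable to
# the client's usual retry behaviour. `osbs` is an assumed, pre-configured OSBS instance.
with osbs.retries_disabled():
    osbs.get_image_stream('fedora')
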
Example #16
class OSBS(object):
    """
    Note: all API methods return an osbs.http.Response object. For historical
    reasons this is not true of list_builds and get_user, which return a list of
    BuildResponse objects and a dict, respectively.
    """
    @osbsapi
    def __init__(self, openshift_configuration, build_configuration):
        """ """
        self.os_conf = openshift_configuration
        self.build_conf = build_configuration
        self.os = Openshift(openshift_api_url=self.os_conf.get_openshift_api_uri(),
                            openshift_api_version=self.os_conf.get_openshift_api_version(),
                            openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
                            k8s_api_url=self.os_conf.get_k8s_api_uri(),
                            verbose=self.os_conf.get_verbosity(),
                            username=self.os_conf.get_username(),
                            password=self.os_conf.get_password(),
                            use_kerberos=self.os_conf.get_use_kerberos(),
                            client_cert=self.os_conf.get_client_cert(),
                            client_key=self.os_conf.get_client_key(),
                            kerberos_keytab=self.os_conf.get_kerberos_keytab(),
                            kerberos_principal=self.os_conf.get_kerberos_principal(),
                            kerberos_ccache=self.os_conf.get_kerberos_ccache(),
                            use_auth=self.os_conf.get_use_auth(),
                            verify_ssl=self.os_conf.get_verify_ssl())
        self._bm = None

    # some calls might not need build manager so let's make it lazy
    @property
    def bm(self):
        if self._bm is None:
            self._bm = BuildManager(build_json_store=self.os_conf.get_build_json_store())
        return self._bm

    @osbsapi
    def list_builds(self, namespace=DEFAULT_NAMESPACE):
        response = self.os.list_builds(namespace=namespace)
        serialized_response = response.json()
        build_list = []
        for build in serialized_response["items"]:
            build_list.append(BuildResponse(None, build))
        return build_list

    @osbsapi
    def get_build(self, build_id, namespace=DEFAULT_NAMESPACE):
        response = self.os.get_build(build_id, namespace=namespace)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def cancel_build(self, build_id, namespace=DEFAULT_NAMESPACE):
        response = self.os.cancel_build(build_id, namespace=namespace)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def get_pod_for_build(self, build_id, namespace=DEFAULT_NAMESPACE):
        """
        :return: PodResponse object for pod relating to the build
        """
        pods = self.os.list_pods(label='openshift.io/build.name=%s' % build_id,
                                 namespace=namespace)
        serialized_response = pods.json()
        pod_list = [PodResponse(pod) for pod in serialized_response["items"]]
        if not pod_list:
            raise OsbsException("No pod for build")
        elif len(pod_list) != 1:
            raise OsbsException("Only one pod expected but %d returned",
                                len(pod_list))
        return pod_list[0]

    @osbsapi
    def get_build_request(self, build_type=None):
        """
        return instance of BuildRequest according to specified build type

        :param build_type: str, name of build type
        :return: instance of BuildRequest
        """
        build_type = build_type or self.build_conf.get_build_type()
        build_request = self.bm.get_build_request_by_type(build_type=build_type)

        # Apply configured resource limits.
        cpu_limit = self.build_conf.get_cpu_limit()
        memory_limit = self.build_conf.get_memory_limit()
        storage_limit = self.build_conf.get_storage_limit()
        if (cpu_limit is not None or
                memory_limit is not None or
                storage_limit is not None):
            build_request.set_resource_limits(cpu=cpu_limit,
                                              memory=memory_limit,
                                              storage=storage_limit)

        return build_request

    @osbsapi
    def create_build_from_buildrequest(self, build_request, namespace=DEFAULT_NAMESPACE):
        """
        render provided build_request and submit build from it

        :param build_request: instance of build.build_request.BuildRequest
        :param namespace: str, place/context where the build should be executed
        :return: instance of build.build_response.BuildResponse
        """
        build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
        build = build_request.render()
        response = self.os.create_build(json.dumps(build), namespace=namespace)
        build_response = BuildResponse(response)
        return build_response

    def _get_running_builds_for_build_config(self, build_config_id, namespace=DEFAULT_NAMESPACE):
        all_builds_for_bc = self.os.list_builds(
            build_config_id=build_config_id,
            namespace=namespace).json()['items']
        running = []
        for b in all_builds_for_bc:
            br = BuildResponse(request=None, build_json=b)
            if br.is_pending() or br.is_running():
                running.append(br)
        return running

    def _poll_for_builds_from_buildconfig(self, build_config_id, namespace=DEFAULT_NAMESPACE):
        # try polling for 60 seconds and then fail if build doesn't appear
        deadline = int(time.time()) + 60
        while int(time.time()) < deadline:
            logger.debug('polling for build from BuildConfig "%s"' % build_config_id)
            builds = self._get_running_builds_for_build_config(build_config_id, namespace)
            if len(builds) > 0:
                return builds
            # wait for 5 seconds before trying again
            time.sleep(5)

        raise OsbsException('Waited for new build from "%s", but none was automatically created' %
                            build_config_id)

    def _panic_msg_for_more_running_builds(self, build_config_name, builds):
        # this should never happen, but if it does, we want to know all the builds
        #  that were running at the time
        builds = ', '.join(['%s: %s' % (b.get_build_name(), b.status) for b in builds])
        msg = 'Multiple builds for %s running, can\'t proceed: %s' % \
            (build_config_name, builds)
        return msg

    def _create_build_config_and_build(self, build_request, namespace):
        # TODO: test this method more thoroughly
        build_json = build_request.render()
        apiVersion = build_json['apiVersion']
        if apiVersion != self.os_conf.get_openshift_api_version():
            raise OsbsValidationException("BuildConfig template has incorrect apiVersion (%s)" %
                                          apiVersion)

        build_config_name = build_json['metadata']['name']

        # check if a build already exists for this config; if so then raise
        running_builds = self._get_running_builds_for_build_config(build_config_name, namespace)
        rb_len = len(running_builds)
        if rb_len > 0:
            if rb_len == 1:
                rb = running_builds[0]
                msg = 'Build %s for %s in state %s, can\'t proceed.' % \
                    (rb.get_build_name(), build_config_name, rb.status)
            else:
                msg = self._panic_msg_for_more_running_builds(build_config_name, running_builds)
            raise OsbsException(msg)

        existing_bc = None
        try:
            # see if there's already a build config
            existing_bc = self.os.get_build_config(build_config_name)
        except OsbsException:
            pass  # doesn't exist => do nothing

        build = None
        if existing_bc is not None:
            utils.deep_update(existing_bc, build_json)
            logger.debug('build config for %s already exists, updating...', build_config_name)
            self.os.update_build_config(build_config_name, json.dumps(existing_bc), namespace)
        else:
            # if it doesn't exist, then create it
            logger.debug('build config for %s doesn\'t exist, creating...', build_config_name)
            self.os.create_build_config(json.dumps(build_json), namespace=namespace)
            # if there's an "ImageChangeTrigger" on the BuildConfig and "From" is of type
            #  "ImageStreamTag", the build will be scheduled automatically
            #  see https://github.com/projectatomic/osbs-client/issues/205
            if build_request.is_auto_instantiated():
                builds = self._poll_for_builds_from_buildconfig(build_config_name, namespace)
                if len(builds) > 0:
                    if len(builds) > 1:
                        raise OsbsException(
                            self._panic_msg_for_more_running_builds(build_config_name, builds))
                    else:
                        build = builds[0].request
        if build is None:
            build = self.os.start_build(build_config_name, namespace=namespace)
        return build

    @osbsapi
    def create_prod_build(self, git_uri, git_ref, git_branch, user, component, target,
                          architecture, yum_repourls=None, git_push_url=None,
                          namespace=DEFAULT_NAMESPACE, **kwargs):
        df_parser = utils.get_df_parser(git_uri, git_ref, git_branch)
        build_request = self.get_build_request(PROD_BUILD_TYPE)
        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            git_branch=git_branch,
            user=user,
            component=component,
            base_image=df_parser.baseimage,
            name_label=df_parser.labels['Name'],
            registry_uri=self.build_conf.get_registry_uri(),
            openshift_uri=self.os_conf.get_openshift_base_uri(),
            kojiroot=self.build_conf.get_kojiroot(),
            kojihub=self.build_conf.get_kojihub(),
            sources_command=self.build_conf.get_sources_command(),
            koji_target=target,
            architecture=architecture,
            vendor=self.build_conf.get_vendor(),
            build_host=self.build_conf.get_build_host(),
            authoritative_registry=self.build_conf.get_authoritative_registry(),
            yum_repourls=yum_repourls,
            pulp_secret=self.build_conf.get_pulp_secret(),
            use_auth=self.build_conf.get_builder_use_auth(),
            pulp_registry=self.os_conf.get_pulp_registry(),
            nfs_server_path=self.os_conf.get_nfs_server_path(),
            nfs_dest_dir=self.build_conf.get_nfs_destination_dir(),
            git_push_url=self.build_conf.get_git_push_url(),
            git_push_username=self.build_conf.get_git_push_username(),
        )
        build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
        response = self._create_build_config_and_build(build_request, namespace)
        build_response = BuildResponse(response)
        logger.debug(build_response.json)
        return build_response

    @osbsapi
    def create_prod_with_secret_build(self, git_uri, git_ref, git_branch, user, component,
                                      target, architecture, yum_repourls=None,
                                      namespace=DEFAULT_NAMESPACE, **kwargs):
        return self.create_prod_build(git_uri, git_ref, git_branch, user, component, target,
                                      architecture, yum_repourls=yum_repourls,
                                      namespace=namespace, **kwargs)

    @osbsapi
    def create_prod_without_koji_build(self, git_uri, git_ref, git_branch, user, component,
                                       architecture, yum_repourls=None,
                                       namespace=DEFAULT_NAMESPACE, **kwargs):
        return self.create_prod_build(git_uri, git_ref, git_branch, user, component, None,
                                      architecture, yum_repourls=yum_repourls,
                                      namespace=namespace, **kwargs)

    @osbsapi
    def create_simple_build(self, git_uri, git_ref, user, component, yum_repourls=None,
                            namespace=DEFAULT_NAMESPACE, **kwargs):
        build_request = self.get_build_request(SIMPLE_BUILD_TYPE)
        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            user=user,
            component=component,
            registry_uri=self.build_conf.get_registry_uri(),
            openshift_uri=self.os_conf.get_openshift_base_uri(),
            yum_repourls=yum_repourls,
            use_auth=self.build_conf.get_builder_use_auth(),
        )
        build_request.set_openshift_required_version(self.os_conf.get_openshift_required_version())
        response = self._create_build_config_and_build(build_request, namespace)
        build_response = BuildResponse(response)
        logger.debug(build_response.json)
        return build_response

    @osbsapi
    def create_build(self, namespace=DEFAULT_NAMESPACE, **kwargs):
        """
        take input args, create build request from provided build type and submit the build

        :param namespace: str, place/context where the build should be executed
        :param kwargs: keyword args for build
        :return: instance of BuildRequest
        """
        build_type = self.build_conf.get_build_type()
        if build_type in (PROD_BUILD_TYPE,
                          PROD_WITHOUT_KOJI_BUILD_TYPE,
                          PROD_WITH_SECRET_BUILD_TYPE):
            return self.create_prod_build(namespace=namespace, **kwargs)
        elif build_type == SIMPLE_BUILD_TYPE:
            return self.create_simple_build(namespace=namespace, **kwargs)
        elif build_type == PROD_WITH_SECRET_BUILD_TYPE:
            return self.create_prod_with_secret_build(namespace=namespace, **kwargs)
        else:
            raise OsbsException("Unknown build type: '%s'" % build_type)

    @osbsapi
    def get_build_logs(self, build_id, follow=False, build_json=None, wait_if_missing=False,
                       namespace=DEFAULT_NAMESPACE):
        """
        provide logs from build

        :param build_id: str
        :param follow: bool, fetch logs as they come?
        :param build_json: dict, to save one get-build query
        :param wait_if_missing: bool, if build doesn't exist, wait
        :param namespace: str
        :return: None, str or iterator
        """
        return self.os.logs(build_id, follow=follow, build_json=build_json,
                            wait_if_missing=wait_if_missing, namespace=namespace)

    @osbsapi
    def get_docker_build_logs(self, build_id, decode_logs=True, build_json=None,
                              namespace=DEFAULT_NAMESPACE):
        """
        get logs provided by "docker build"

        :param build_id: str
        :param decode_logs: bool, docker by default output logs in simple json structure:
            { "stream": "line" }
            if this arg is set to True, it decodes logs to human readable form
        :param build_json: dict, to save one get-build query
        :param namespace: str
        :return: str
        """
        if not build_json:
            build = self.os.get_build(build_id, namespace=namespace)
            build_response = BuildResponse(build)
        else:
            build_response = BuildResponse(None, build_json)

        if build_response.is_finished():
            logs = build_response.get_logs(decode_logs=decode_logs)
            return logs
        logger.warning("build hasn't finished yet")

    @osbsapi
    def wait_for_build_to_finish(self, build_id, namespace=DEFAULT_NAMESPACE):
        response = self.os.wait_for_build_to_finish(build_id, namespace=namespace)
        build_response = BuildResponse(None, response)
        return build_response

    @osbsapi
    def wait_for_build_to_get_scheduled(self, build_id, namespace=DEFAULT_NAMESPACE):
        response = self.os.wait_for_build_to_get_scheduled(build_id, namespace=namespace)
        build_response = BuildResponse(None, response)
        return build_response

    @osbsapi
    def update_labels_on_build(self, build_id, labels,
                               namespace=DEFAULT_NAMESPACE):
        response = self.os.update_labels_on_build(build_id, labels,
                                                  namespace=namespace)
        return response

    @osbsapi
    def set_labels_on_build(self, build_id, labels, namespace=DEFAULT_NAMESPACE):
        response = self.os.set_labels_on_build(build_id, labels, namespace=namespace)
        return response

    @osbsapi
    def update_labels_on_build_config(self, build_config_id, labels,
                                      namespace=DEFAULT_NAMESPACE):
        response = self.os.update_labels_on_build_config(build_config_id,
                                                         labels,
                                                         namespace=namespace)
        return response

    @osbsapi
    def set_labels_on_build_config(self, build_config_id, labels,
                                   namespace=DEFAULT_NAMESPACE):
        response = self.os.set_labels_on_build_config(build_config_id,
                                                      labels,
                                                      namespace=namespace)
        return response

    @osbsapi
    def update_annotations_on_build(self, build_id, annotations,
                                    namespace=DEFAULT_NAMESPACE):
        return self.os.update_annotations_on_build(build_id, annotations,
                                                   namespace=namespace)

    @osbsapi
    def set_annotations_on_build(self, build_id, annotations, namespace=DEFAULT_NAMESPACE):
        return self.os.set_annotations_on_build(build_id, annotations, namespace=namespace)

    @osbsapi
    def import_image(self, name, namespace=DEFAULT_NAMESPACE):
        return self.os.import_image(name, namespace=namespace)

    @osbsapi
    def get_token(self):
        return self.os.get_oauth_token()

    @osbsapi
    def get_user(self, username="~"):
        return self.os.get_user(username).json()

    @osbsapi
    def get_image_stream(self, stream_id, namespace=DEFAULT_NAMESPACE):
        return self.os.get_image_stream(stream_id, namespace)

    @osbsapi
    def create_image_stream(self, name, docker_image_repository, namespace=DEFAULT_NAMESPACE):
        img_stream_file = os.path.join(self.os_conf.get_build_json_store(), 'image_stream.json')
        stream = json.load(open(img_stream_file))
        stream['metadata']['name'] = name
        stream['spec']['dockerImageRepository'] = docker_image_repository
        return self.os.create_image_stream(json.dumps(stream), namespace=namespace)
Example #17
File: api.py  Project: pbabinca/osbs
class OSBS(object):
    """ """
    @osbsapi
    def __init__(self, openshift_configuration, build_configuration):
        """ """
        self.os_conf = openshift_configuration
        self.build_conf = build_configuration
        self.os = Openshift(openshift_api_url=self.os_conf.get_openshift_api_uri(),
                            openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
                            verbose=self.os_conf.get_verbosity(),
                            username=self.os_conf.get_username(),
                            password=self.os_conf.get_password(),
                            use_kerberos=self.os_conf.get_use_kerberos(),
                            use_auth=self.os_conf.get_use_auth(),
                            verify_ssl=self.os_conf.get_verify_ssl())
        self._bm = None

    # some calls might not need build manager so let's make it lazy
    @property
    def bm(self):
        if self._bm is None:
            self._bm = BuildManager(build_json_store=self.os_conf.get_build_json_store())
        return self._bm

    @osbsapi
    def list_builds(self, namespace=DEFAULT_NAMESPACE):
        response = self.os.list_builds(namespace=namespace)
        serialized_response = response.json()
        build_list = []
        for build in serialized_response["items"]:
            build_list.append(BuildResponse(response, build))
        return build_list

    @osbsapi
    def get_build(self, build_id, namespace=DEFAULT_NAMESPACE):
        response = self.os.get_build(build_id, namespace=namespace)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def get_build_request(self, build_type=None):
        """
        return instance of BuildRequest according to specified build type

        :param build_type: str, name of build type
        :return: instance of BuildRequest
        """
        build_type = build_type or self.build_conf.get_build_type()
        build_request = self.bm.get_build_request_by_type(build_type=build_type)

        # Apply configured resource limits.
        cpu_limit = self.build_conf.get_cpu_limit()
        memory_limit = self.build_conf.get_memory_limit()
        storage_limit = self.build_conf.get_storage_limit()
        if (cpu_limit is not None or
                memory_limit is not None or
                storage_limit is not None):
            build_request.set_resource_limits(cpu=cpu_limit,
                                              memory=memory_limit,
                                              storage=storage_limit)

        return build_request

    @osbsapi
    def create_build_from_buildrequest(self, build_request, namespace=DEFAULT_NAMESPACE):
        """
        render provided build_request and submit build from it

        :param build_request: instance of build.build_request.BuildRequest
        :param namespace: str, place/context where the build should be executed
        :return: instance of build.build_response.BuildResponse
        """
        build = build_request.render()
        response = self.os.create_build(json.dumps(build.build_json), namespace=namespace)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def create_prod_build(self, git_uri, git_ref, user, component, target, architecture, yum_repourls=None,
                          namespace=DEFAULT_NAMESPACE, **kwargs):
        build_request = self.get_build_request(PROD_BUILD_TYPE)
        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            user=user,
            component=component,
            registry_uri=self.build_conf.get_registry_uri(),
            openshift_uri=self.os_conf.get_openshift_api_uri(),
            kojiroot=self.build_conf.get_kojiroot(),
            kojihub=self.build_conf.get_kojihub(),
            sources_command=self.build_conf.get_sources_command(),
            koji_target=target,
            architecture=architecture,
            vendor=self.build_conf.get_vendor(),
            build_host=self.build_conf.get_build_host(),
            authoritative_registry=self.build_conf.get_authoritative_registry(),
            yum_repourls=yum_repourls,
            metadata_plugin_use_auth=self.build_conf.get_metadata_plugin_use_auth(),
        )
        build_json = build_request.render()
        response = self.os.create_build(json.dumps(build_json), namespace=namespace)
        build_response = BuildResponse(response)
        logger.debug(build_response.json)
        return build_response

    @osbsapi
    def create_prod_with_secret_build(self, git_uri, git_ref, user, component, target, architecture,
                                      yum_repourls=None, namespace=DEFAULT_NAMESPACE, **kwargs):
        build_request = self.get_build_request(PROD_WITH_SECRET_BUILD_TYPE)
        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            user=user,
            component=component,
            registry_uri=self.build_conf.get_registry_uri(),
            openshift_uri=self.os_conf.get_openshift_api_uri(),
            kojiroot=self.build_conf.get_kojiroot(),
            kojihub=self.build_conf.get_kojihub(),
            sources_command=self.build_conf.get_sources_command(),
            koji_target=target,
            architecture=architecture,
            vendor=self.build_conf.get_vendor(),
            build_host=self.build_conf.get_build_host(),
            authoritative_registry=self.build_conf.get_authoritative_registry(),
            yum_repourls=yum_repourls,
            source_secret=self.build_conf.get_source_secret(),
            metadata_plugin_use_auth=self.build_conf.get_metadata_plugin_use_auth(),
            pulp_registry=self.os_conf.get_pulp_registry(),
            nfs_server_path=self.os_conf.get_nfs_server_path(),
            nfs_dest_dir=self.build_conf.get_nfs_destination_dir(),
        )
        build_json = build_request.render()
        response = self.os.create_build(json.dumps(build_json), namespace=namespace)
        build_response = BuildResponse(response)
        logger.debug(build_response.json)
        return build_response

    @osbsapi
    def create_prod_without_koji_build(self, git_uri, git_ref, user, component, architecture, yum_repourls=None,
                                       namespace=DEFAULT_NAMESPACE, **kwargs):
        build_request = self.get_build_request(PROD_WITHOUT_KOJI_BUILD_TYPE)
        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            user=user,
            component=component,
            registry_uri=self.build_conf.get_registry_uri(),
            openshift_uri=self.os_conf.get_openshift_api_uri(),
            sources_command=self.build_conf.get_sources_command(),
            architecture=architecture,
            vendor=self.build_conf.get_vendor(),
            build_host=self.build_conf.get_build_host(),
            authoritative_registry=self.build_conf.get_authoritative_registry(),
            yum_repourls=yum_repourls,
            metadata_plugin_use_auth=self.build_conf.get_metadata_plugin_use_auth(),
        )
        build_json = build_request.render()
        response = self.os.create_build(json.dumps(build_json), namespace=namespace)
        build_response = BuildResponse(response)
        return build_response

    @osbsapi
    def create_simple_build(self, git_uri, git_ref, user, component, yum_repourls=None,
                            namespace=DEFAULT_NAMESPACE, **kwargs):
        build_request = self.get_build_request(SIMPLE_BUILD_TYPE)
        build_request.set_params(
            git_uri=git_uri,
            git_ref=git_ref,
            user=user,
            component=component,
            registry_uri=self.build_conf.get_registry_uri(),
            openshift_uri=self.os_conf.get_openshift_api_uri(),
            yum_repourls=yum_repourls,
            metadata_plugin_use_auth=self.build_conf.get_metadata_plugin_use_auth(),
        )
        build_json = build_request.render()
        response = self.os.create_build(json.dumps(build_json), namespace=namespace)
        build_response = BuildResponse(response)
        logger.debug(build_response.json)
        return build_response

    @osbsapi
    def create_build(self, namespace=DEFAULT_NAMESPACE, **kwargs):
        """
        take input args, create build request from provided build type and submit the build

        :param namespace: str, place/context where the build should be executed
        :param kwargs: keyword args for build
        :return: instance of BuildRequest
        """
        build_type = self.build_conf.get_build_type()
        if build_type == PROD_BUILD_TYPE:
            return self.create_prod_build(namespace=namespace, **kwargs)
        elif build_type == SIMPLE_BUILD_TYPE:
            return self.create_simple_build(namespace=namespace, **kwargs)
        elif build_type == PROD_WITHOUT_KOJI_BUILD_TYPE:
            return self.create_prod_without_koji_build(namespace=namespace, **kwargs)
        elif build_type == PROD_WITH_SECRET_BUILD_TYPE:
            return self.create_prod_with_secret_build(namespace=namespace, **kwargs)
        else:
            raise OsbsException("Unknown build type: '%s'" % build_type)
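# Usage sketch. Assumptions: this class is the OSBS API front-end (an older form of it
# appears in the next example on this page), and os_conf / build_conf are configuration
# objects of the kinds its __init__ expects; the repository, user and component values
# are hypothetical.
osbs = OSBS(os_conf, build_conf)
response = osbs.create_build(
    git_uri="https://example.com/app.git",
    git_ref="master",
    user="jdoe",
    component="app",
    # depending on the configured build type, extra keywords such as
    # architecture=... or target=... may also be needed
)
print(response.json)  # BuildResponse exposes the parsed build object as .json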

    @osbsapi
    def get_build_logs(self, build_id, follow=False, build_json=None, wait_if_missing=False,
                       namespace=DEFAULT_NAMESPACE):
        """
        provide logs from build

        :param build_id: str
        :param follow: bool, fetch logs as they come?
        :param build_json: dict, to save one get-build query
        :param wait_if_missing: bool, if build doesn't exist, wait
        :param namespace: str
        :return: None, str or iterator
        """
        return self.os.logs(build_id, follow=follow, build_json=build_json,
                            wait_if_missing=wait_if_missing, namespace=namespace)
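# Sketch of the two calling modes described in the docstring above. Assumptions: osbs is
# an instance of this class (as in the earlier sketch) and the build name is made up;
# the follow=True form is assumed to yield log chunks as they arrive.
logs = osbs.get_build_logs("my-build-1")               # one-shot fetch, str or None
for chunk in osbs.get_build_logs("my-build-1", follow=True):
    print(chunk)                                       # streamed variant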

    @osbsapi
    def get_docker_build_logs(self, build_id, decode_logs=True, build_json=None,
                              namespace=DEFAULT_NAMESPACE):
        """
        get logs provided by "docker build"

        :param build_id: str
        :param decode_logs: bool, docker outputs logs as a simple JSON structure per line:
            { "stream": "line" }
            if this arg is set to True, the logs are decoded into human-readable form
        :param build_json: dict, to save one get-build query
        :param namespace: str
        :return: str or None (if the build hasn't finished yet)
        """
        if not build_json:
            build = self.os.get_build(build_id, namespace=namespace)
            build_response = BuildResponse(build)
        else:
            build_response = BuildResponse(None, build_json)

        if build_response.is_finished():
            logs = build_response.get_logs(decode_logs=decode_logs)
            return logs
        logger.warning("build hasn't finished yet")
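# Illustration only, not the library's decoder: per the docstring above, each raw docker
# log line is a JSON object of the form {"stream": "line"}, so "decoding" amounts to
# concatenating the "stream" values.
import json

raw = '{"stream": "Step 1 : FROM fedora\\n"}\n{"stream": " ---> 3f2fed40e4b0\\n"}'
decoded = "".join(json.loads(line)["stream"] for line in raw.splitlines() if line.strip())
print(decoded)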

    @osbsapi
    def wait_for_build_to_finish(self, build_id, namespace=DEFAULT_NAMESPACE):
        response = self.os.wait_for_build_to_finish(build_id, namespace=namespace)
        build_response = BuildResponse(None, response)
        return build_response

    @osbsapi
    def wait_for_build_to_get_scheduled(self, build_id, namespace=DEFAULT_NAMESPACE):
        response = self.os.wait_for_build_to_get_scheduled(build_id, namespace=namespace)
        build_response = BuildResponse(None, response)
        return build_response
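# Combined sketch. Assumptions: osbs is an instance of this class and the build name is
# hypothetical; wait for the build to get scheduled and finish, then read its docker logs.
osbs.wait_for_build_to_get_scheduled("my-build-1")
finished = osbs.wait_for_build_to_finish("my-build-1")
if finished.is_finished():
    print(osbs.get_docker_build_logs("my-build-1"))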

    @osbsapi
    def set_labels_on_build(self, build_id, labels, namespace=DEFAULT_NAMESPACE):
        response = self.os.set_labels_on_build(build_id, labels, namespace=namespace)
        return response

    @osbsapi
    def get_token(self):
        return self.os.get_oauth_token()

    @osbsapi
    def get_user(self, username="******"):
        return self.os.get_user(username).json()
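# Sketch: the value returned by get_token() is the OAuth token the client itself uses,
# so it can also back manual requests against the OpenShift API, e.g. as a Bearer header
# (osbs is assumed to be an instance of this class, as above).
token = osbs.get_token()
headers = {"Authorization": "Bearer %s" % token}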
Example #18
File: api.py Project: mmilata/osbs
class OSBS(object):
    """ """
    def __init__(self, openshift_configuration, build_configuration):
        """ """
        self.os_conf = openshift_configuration
        self.build_conf = build_configuration
        self.os = Openshift(openshift_api_url=self.os_conf.get_openshift_api_uri(),
                            openshift_oauth_url=self.os_conf.get_openshift_oauth_api_uri(),
                            kubelet_base=self.os_conf.get_kubelet_uri(),
                            verbose=self.os_conf.get_verbosity(),
                            username=self.os_conf.get_username(),
                            password=self.os_conf.get_password(),
                            verify_ssl=self.os_conf.get_verify_ssl())
        self._bm = None

    # some calls might not need build manager so let's make it lazy
    @property
    def bm(self):
        if self._bm is None:
            self._bm = BuildManager(build_json_store=self.os_conf.get_build_json_store())
        return self._bm

    def list_builds(self):
        # FIXME: return list of BuildResponse objects
        builds = self.os.list_builds().json()
        return builds

    def get_build(self, build_id):
        response = self.os.get_build(build_id)
        build_response = BuildResponse(response)
        return build_response

    def create_build(self, git_uri, git_ref, user, component, target):
        build = self.bm.get_build(
            git_uri=git_uri,
            git_ref=git_ref,
            user=user,
            component=component,
            registry_uri=self.build_conf.get_registry_uri(),
            openshift_uri=self.os_conf.get_openshift_api_uri(),
            kojiroot=self.build_conf.get_kojiroot(),
            kojihub=self.build_conf.get_kojihub(),
            sources_command=self.build_conf.get_sources_command(),
            koji_target=target,
        )
        response = self.os.create_build(json.dumps(build.build_json))
        build_response = BuildResponse(response)
        return build_response

    def get_build_logs(self, build_id, follow=False):
        if follow:
            return self.os.logs(build_id, follow)
        try:
            build = self.os.get_build(build_id)
        except OpenshiftException as ex:
            if ex.status_code != 404:
                raise
        else:
            build_json = build.json()
            # in this API version the build status is a plain string such as "Complete";
            # logs of finished builds are read from the build's "logs" label
            if build_json["status"] in ("Complete", "Failed"):
                return build_json["metadata"]["labels"]["logs"]
            else:
                return self.os.logs(build_id, follow)

    def wait_for_build_to_finish(self, build_id):
        # FIXME: since OS returns whole build json in watch we could return
        #        instance of BuildResponse here
        response = self.os.wait(build_id)
        return response
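# End to end sketch for this older variant of the class. Assumptions: os_conf and
# build_conf are the configuration objects its __init__ expects; the repository, user,
# koji target and build name are hypothetical.
osbs = OSBS(os_conf, build_conf)
osbs.create_build(
    git_uri="https://example.com/app.git",
    git_ref="master",
    user="jdoe",
    component="app",
    target="candidate-tag",
)
osbs.wait_for_build_to_finish("app-1")   # hypothetical build name
print(osbs.get_build_logs("app-1"))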