def test_get_openshift_session(self, config, raise_error):
    required_config = """\
version: 1
koji:
    hub_url: /
    root_url: ''
    auth: {}
source_registry:
    url: source_registry.com
registries:
    - url: registry_url
"""
    config += "\n" + required_config

    if raise_error:
        with pytest.raises(Exception):
            read_yaml(config, 'schemas/config.json')
        return

    config_json = read_yaml(config, 'schemas/config.json')

    auth_info = {
        'openshift_url': config_json['openshift']['url'],
        'verify_ssl': not config_json['openshift'].get('insecure', False),
        'use_auth': False,
        'conf_file': None,
        'namespace': 'namespace',
    }
    if config_json['openshift'].get('auth'):
        if config_json['openshift']['auth'].get('krb_keytab_path'):
            auth_info['kerberos_keytab'] = \
                config_json['openshift']['auth'].get('krb_keytab_path')
        if config_json['openshift']['auth'].get('krb_principal'):
            auth_info['kerberos_principal'] = \
                config_json['openshift']['auth'].get('krb_principal')
        if config_json['openshift']['auth'].get('krb_cache_path'):
            auth_info['kerberos_ccache'] = \
                config_json['openshift']['auth'].get('krb_cache_path')
        if config_json['openshift']['auth'].get('ssl_certs_dir'):
            auth_info['client_cert'] = \
                os.path.join(config_json['openshift']['auth'].get('ssl_certs_dir'), 'cert')
            auth_info['client_key'] = \
                os.path.join(config_json['openshift']['auth'].get('ssl_certs_dir'), 'key')
        auth_info['use_auth'] = config_json['openshift']['auth'].get('enable', False)

    # spy on the real constructors to assert they receive the expected arguments
    (flexmock(osbs.conf.Configuration)
        .should_call('__init__')
        .with_args(**auth_info)
        .once())
    (flexmock(osbs.api.OSBS)
        .should_call('__init__')
        .once())

    conf = Configuration(raw_config=config_json)
    get_openshift_session(conf, 'namespace')
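# A minimal sketch of how this test might be parametrized; the config
# snippets and expected outcomes below are illustrative assumptions, not
# the real suite's cases.
@pytest.mark.parametrize(('config', 'raise_error'), [
    # a well-formed 'openshift' section should validate against the schema
    ("openshift:\n    url: openshift_url\n", False),
    # hypothetical failing case: 'openshift' missing its required 'url'
    ("openshift: {}\n", True),
])
def test_get_openshift_session_sketch(config, raise_error):
    ...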
def openshift_session(self):
    if not self._openshift_session:
        self._openshift_session = \
            get_openshift_session(self.workflow.conf,
                                  self.workflow.user_params.get('namespace'))

    return self._openshift_session
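# Standalone illustration of the lazy-initialization pattern used above;
# _Workflow, _make_session and _Plugin are stand-ins, not the real classes.
class _Workflow:
    def __init__(self, conf, namespace):
        self.conf = conf
        self.user_params = {'namespace': namespace}

def _make_session(conf, namespace):
    print("creating session")  # printed only on first access
    return object()

class _Plugin:
    def __init__(self, workflow):
        self.workflow = workflow
        self._openshift_session = None

    @property
    def openshift_session(self):
        # create on first access, reuse on every later one
        if not self._openshift_session:
            self._openshift_session = _make_session(
                self.workflow.conf, self.workflow.user_params.get('namespace'))
        return self._openshift_session

_plugin = _Plugin(_Workflow(conf={}, namespace='ns'))
assert _plugin.openshift_session is _plugin.openshift_session  # single session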
def remote_hosts_unlocking_recovery(job_args: dict) -> None:
    config = Configuration(config_path=job_args['config_file'])
    osbs = get_openshift_session(config, job_args['namespace'])

    remote_host_pools = config.remote_hosts.get("pools")

    for platform in remote_host_pools.keys():
        platform_pool = remote_host.RemoteHostsPool.from_config(
            config.remote_hosts, platform)

        for host in platform_pool.hosts:
            logger.info("Checking occupied slots for platform: %s on host: %s",
                        platform, host.hostname)
            for slot in range(host.slots):
                prid = host.prid_in_slot(slot)

                if not prid:
                    continue

                logger.info("slot: %s is occupied by prid: %s", slot, prid)

                # build_not_finished() is True while the pipeline run is
                # still active; only slots held by finished runs are unlocked
                if not osbs.build_not_finished(prid):
                    logger.info('prid: %s finished, will unlock slot: %s', prid, slot)
                    host.unlock(slot, prid)
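# Hypothetical command-line wrapper for the recovery job; the flag names
# simply mirror the job_args keys read above and are assumptions.
import argparse

def main() -> None:
    parser = argparse.ArgumentParser(
        description='Unlock remote-host slots held by finished pipeline runs')
    parser.add_argument('--config-file', required=True)
    parser.add_argument('--namespace', required=True)
    args = parser.parse_args()

    remote_hosts_unlocking_recovery({
        'config_file': args.config_file,
        'namespace': args.namespace,
    })

if __name__ == '__main__':
    main()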
def __init__(self, workflow, blocksize=None, poll_interval=5, userdata=None):
    """
    constructor

    :param workflow: DockerBuildWorkflow instance
    :param blocksize: int, blocksize to use for uploading files
    :param poll_interval: int, seconds between Koji task status requests
    :param userdata: dict, custom user data
    """
    super(KojiImportBase, self).__init__(workflow)

    self.blocksize = blocksize
    self.poll_interval = poll_interval

    self.osbs = get_openshift_session(self.workflow.conf, self.workflow.namespace)
    self.build_id = None
    self.session = None
    self.userdata = userdata

    self.koji_task_id = None
    koji_task_id = self.workflow.user_params.get('koji_task_id')
    if koji_task_id is not None:
        try:
            self.koji_task_id = int(koji_task_id)
        except ValueError:
            # an invalid task ID is not fatal; exc_info=True attaches
            # the traceback to the error record
            self.log.error("invalid task ID %r", koji_task_id, exc_info=True)
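# Self-contained demonstration of the task-ID parsing above; the helper
# name and logger are illustrative, not part of the plugin.
import logging

logging.basicConfig(level=logging.ERROR)
_log = logging.getLogger('koji-import-demo')

def _parse_task_id(value):
    """Return the task ID as an int, or None if it cannot be parsed."""
    try:
        return int(value)
    except (TypeError, ValueError):
        # exc_info=True makes the logger include the traceback
        _log.error("invalid task ID %r", value, exc_info=True)
        return None

assert _parse_task_id('12345') == 12345
assert _parse_task_id('not-a-number') is None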
def run(self):
    pipeline_run_name = self.workflow.pipeline_run_name
    self.log.info("pipelineRun name = %s", pipeline_run_name)
    osbs = get_openshift_session(self.workflow.conf, self.workflow.namespace)

    wf_data = self.workflow.data

    if not self.source_build:
        try:
            commit_id = self.workflow.source.commit_id
        except AttributeError:
            commit_id = ""

        base_image = wf_data.dockerfile_images.original_base_image
        if base_image is not None and not wf_data.dockerfile_images.base_from_scratch:
            base_image_name = base_image
            try:
                # OSBS2 TBD: we probably don't need this and many other annotations anymore
                base_image_id = self.workflow.imageutil.base_image_inspect().get('Id', "")
            except KeyError:
                base_image_id = ""
        else:
            base_image_name = ""
            base_image_id = ""

        parent_images_strings = self.workflow.parent_images_to_str()
        if wf_data.dockerfile_images.base_from_scratch:
            parent_images_strings[SCRATCH_FROM] = SCRATCH_FROM

        try:
            with open(self.workflow.df_path) as f:
                dockerfile_contents = f.read()
        except AttributeError:
            dockerfile_contents = ""

    annotations = {
        'digests': json.dumps(self.get_pullspecs(self.get_digests())),
        'plugins-metadata': json.dumps(self.get_plugin_metadata()),
        'filesystem': json.dumps(self.get_filesystem_metadata()),
    }

    if self.source_build:
        annotations['image-id'] = ''
        if wf_data.koji_source_manifest:
            annotations['image-id'] = wf_data.koji_source_manifest['config']['digest']
    else:
        annotations['dockerfile'] = dockerfile_contents
        annotations['commit_id'] = commit_id
        annotations['base-image-id'] = base_image_id
        annotations['base-image-name'] = base_image_name
        # OSBS2 TBD
        annotations['image-id'] = wf_data.image_id or ''
        annotations['parent_images'] = json.dumps(parent_images_strings)

    media_types = []

    media_results = wf_data.postbuild_results.get(PLUGIN_VERIFY_MEDIA_KEY)
    if isinstance(media_results, Exception):
        media_results = None

    if media_results:
        media_types += media_results

    if media_types:
        annotations['media-types'] = json.dumps(sorted(set(media_types)))

    tar_path = tar_size = tar_md5sum = tar_sha256sum = None
    if len(wf_data.exported_image_sequence) > 0:
        # OSBS2 TBD: exported_image_sequence will not work for multiple platforms
        info = wf_data.exported_image_sequence[-1]
        tar_path = info.get("path")
        tar_size = info.get("size")
        tar_md5sum = info.get("md5sum")
        tar_sha256sum = info.get("sha256sum")

    # OpenShift can't handle a value of None (null in JSON)
    if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
            tar_path is not None:
        annotations["tar_metadata"] = json.dumps({
            "size": tar_size,
            "md5sum": tar_md5sum,
            "sha256sum": tar_sha256sum,
            "filename": os.path.basename(tar_path),
        })

    self.apply_plugin_annotations(annotations)
    self.set_koji_task_annotations_whitelist(annotations)

    try:
        osbs.update_annotations_on_build(pipeline_run_name, annotations)
    except OsbsResponseException:
        self.log.debug("annotations: %r", annotations)
        raise

    labels = self.make_labels()
    if labels:
        try:
            osbs.update_labels_on_build(pipeline_run_name, labels)
        except OsbsResponseException:
            self.log.debug("labels: %r", labels)
            raise

    return {"annotations": annotations, "labels": labels}
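# Why the json.dumps calls above: Kubernetes/OpenShift annotations map
# strings to strings, so any nested structure has to be serialized first.
# A toy example with illustrative values:
import json

_digests = [{'registry': 'registry_url', 'digest': 'sha256:abc123'}]
_annotations = {
    'digests': json.dumps(_digests),  # list of dicts -> JSON string
    'media-types': json.dumps(sorted({'application/json'})),  # set -> sorted list -> string
}
assert all(isinstance(v, str) for v in _annotations.values())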
def osbs(self) -> OSBS:
    return get_openshift_session(self.conf, self.namespace)
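# As written, the accessor above builds a new session on every attribute
# access. If one session per object is wanted instead, a sketch with
# functools.cached_property (whether the real class memoizes is not shown
# here; _get_session stands in for get_openshift_session):
from functools import cached_property

def _get_session(conf, namespace):
    return object()  # stand-in for the real session factory

class _Task:
    def __init__(self, conf, namespace):
        self.conf = conf
        self.namespace = namespace

    @cached_property
    def osbs(self):
        # computed once, cached on the instance afterwards
        return _get_session(self.conf, self.namespace)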