def get_logs(self, decode_logs=True):
    """
    Return the build logs stored in the build's annotations/labels.

    :param decode_logs: bool, docker by default outputs logs in a simple
                        json structure: { "stream": "line" }
                        if this arg is set to True, it decodes logs to
                        human readable form
    :return: str
    """
    logs = graceful_chain_get(self.get_annotations_or_labels(), "logs")
    if not logs:
        logger.error("no logs")
        return ""

    if not decode_logs:
        return logs

    output = []
    for line in logs.split("\n"):
        try:
            decoded_line = json.loads(line)
        except ValueError:
            # not a JSON log line; skip it
            continue

        output.append(decoded_line.get("stream", "").strip())

        error = decoded_line.get("error", "").strip()
        if error:
            output.append(error)

        # BUGFIX: "errorDetail" is a nested object in docker's log
        # stream (e.g. {"errorDetail": {"message": "..."}}), not a
        # string; calling .strip() on it raised AttributeError.  The
        # human-readable text lives under its "message" key.
        error_detail = decoded_line.get("errorDetail", {})
        error_msg = error_detail.get("message", "").strip()
        if error_msg:
            output.append(error_msg)

        # blank separator line between decoded entries
        output.append("\n")

    return "\n".join(output)
def _update_build_config_when_exist(self, build_json):
    """
    Update an already-existing build config with new build json.

    Finds the existing build config matching build_json, verifies it is
    safe to update (labels match, no build currently running), strips
    fields that must not be carried over, applies the update and pushes
    it to the server.

    :param build_json: dict, new build config content to apply
    :return: dict, the updated existing build config
    """
    existing_bc = self._get_existing_build_config(build_json)
    self._verify_labels_match(build_json, existing_bc)
    # Existing build config may have a different name if matched by
    # git-repo-name and git-branch labels. Continue using existing
    # build config name.
    build_config_name = existing_bc['metadata']['name']
    logger.debug('existing build config name to be used "%s"',
                 build_config_name)
    self._verify_no_running_builds(build_config_name)

    # Remove nodeSelector, will be set from build_json for worker build
    old_nodeselector = existing_bc['spec'].pop('nodeSelector', None)
    logger.debug("removing build config's nodeSelector %s", old_nodeselector)

    # Remove koji_task_id
    koji_task_id = utils.graceful_chain_get(existing_bc, 'metadata', 'labels',
                                            'koji-task-id')
    if koji_task_id is not None:
        logger.debug("removing koji-task-id %r", koji_task_id)
        utils.graceful_chain_del(existing_bc, 'metadata', 'labels',
                                 'koji-task-id')

    utils.buildconfig_update(existing_bc, build_json)
    # Reset name change that may have occurred during
    # update above, since renaming is not supported.
    existing_bc['metadata']['name'] = build_config_name
    logger.debug('build config for %s already exists, updating...',
                 build_config_name)

    self.os.update_build_config(build_config_name, json.dumps(existing_bc))
    return existing_bc
def get_container_image_ids(self):
    """
    Find the image IDs the containers use.

    :return: dict, image tag to docker ID
    """
    statuses = graceful_chain_get(self.json, "status", "containerStatuses")
    if statuses is None:
        return {}

    def remove_prefix(image_id):
        # Can *currently* be one of None, 'docker://', or
        # 'docker-pullable://', but is subject to change.
        # BUGFIX: guard against None explicitly; None.index() raises
        # AttributeError, which the ValueError handler did not catch.
        if image_id is None:
            return image_id
        try:
            # Raises 'ValueError' if not found
            index = image_id.index('://')
            image_id = image_id[index + 3:]
        except ValueError:
            pass
        return image_id

    return {
        status['image']: remove_prefix(status['imageID'])
        for status in statuses
    }
def get_logs(self, decode_logs=True):
    """
    Fetch the build logs stored in the build's annotations/labels.

    :param decode_logs: bool, docker by default outputs logs in a simple
                        json structure: { "stream": "line" }; when True,
                        decode the logs into a human readable form
    :return: str
    """
    logs = graceful_chain_get(self.get_annotations_or_labels(), "logs")
    if not logs:
        logger.debug("no logs found in annotations")
        return ""

    if not decode_logs:
        return logs

    pieces = []
    for raw_line in logs.split("\n"):
        try:
            entry = json.loads(raw_line)
        except ValueError:
            # not a JSON log line; skip it
            continue

        pieces.append(entry.get("stream", "").strip())

        err = entry.get("error", "").strip()
        if err:
            pieces.append(err)

        # "errorDetail" is a nested object; the readable text is under
        # its "message" key.
        detail_msg = entry.get("errorDetail", {}).get("message", "").strip()
        if detail_msg:
            pieces.append(detail_msg)

        # blank separator line between decoded entries
        pieces.append("\n")

    return "\n".join(pieces)
def get_build_config_by_labels_filtered(self, label_selectors, filter_key,
                                        filter_value):
    """
    Returns a build config matching the given label
    selectors, filtering against another predetermined value.
    This method will raise OsbsException if not exactly one
    build config is found after filtering.

    :param label_selectors: labels to match build configs against
    :param filter_key: str, dotted path into each build config to compare
    :param filter_value: value the path must equal; None disables filtering
    :return: dict, the single matching build config
    """
    items = self.get_all_build_configs_by_labels(label_selectors)

    if filter_value is not None:
        key_path = filter_key.split('.')
        items = [bc for bc in items
                 if graceful_chain_get(bc, *key_path) == filter_value]

    if not items:
        raise OsbsException("Build config not found for labels: %r" %
                            (label_selectors, ))
    if len(items) > 1:
        raise OsbsException(
            "More than one build config found for labels: %r" %
            (label_selectors, ))

    return items[0]
def find_repositories_from_archive(self):
    """
    Return docker repositories recorded on the parent build's archives.

    :return: list or None, repositories from the first archive that has
             any; None when no archive carries repository data
    """
    parent_id = self._koji_parent_build_info['id']
    for archive in self.koji_session.listArchives(parent_id):
        repos = graceful_chain_get(archive, 'extra', 'docker', 'repositories')
        if not repos:
            continue
        self.log.info('Using repositories from archive %d', archive['id'])
        return repos
    return None
def cmd_get_all_resource_quota(args, osbs):
    """
    CLI handler: print one resource quota, or the names of all quotas.

    :param args: parsed CLI arguments; QUOTA_NAME may be None
    :param osbs: OSBS instance to query
    """
    quota_name = args.QUOTA_NAME
    logger.debug("quota name = %s", quota_name)
    if quota_name is not None:
        # specific quota requested: dump it in full
        print_json_nicely(osbs.get_resource_quota(quota_name))
        return
    # no name given: list the names of all quotas
    response = osbs.list_resource_quotas()
    for item in response["items"]:
        print(graceful_chain_get(item, "metadata", "name"))
def _prepare_resource(resource_type, resource):
    """
    Strip server-managed fields from a resource before restoring it.

    :param resource_type: str, e.g. 'buildconfigs'
    :param resource: dict, resource definition; mutated in place
    """
    utils.graceful_chain_del(resource, 'metadata', 'resourceVersion')

    if resource_type == 'buildconfigs':
        utils.graceful_chain_del(resource, 'status', 'lastVersion')
        triggers = utils.graceful_chain_get(resource, 'spec', 'triggers') or ()
        for t in triggers:
            # BUGFIX: the OpenShift API field is 'lastTriggeredImageID';
            # the old 'lastTrigerredImageID' spelling never matched, so
            # the stale image ID was left in the restored build config.
            utils.graceful_chain_del(t, 'imageChange', 'lastTriggeredImageID')
def find_repositories_from_build(self):
    """
    Fetch the parent koji build and return its recorded pull repositories.

    Also caches the build info on self._koji_parent_build_info.

    :raises RuntimeError: if the parent build is not found in koji
    :return: value at extra.image.index.pull, or None when absent
    """
    build_info = self.koji_session.getBuild(self.koji_parent_build)
    self._koji_parent_build_info = build_info
    if not build_info:
        raise RuntimeError('Koji build, {}, not found'.format(
            self.koji_parent_build))

    repositories = graceful_chain_get(build_info,
                                      'extra', 'image', 'index', 'pull')
    if repositories:
        self.log.info('Using repositories from build info')
        return repositories
def get_data_by_key(self, name):
    """
    Find the object stored by a JSON string at key 'name'

    :param name: str, key within the ConfigMap's data section
    :return: str or dict, the json of the str or dict stored in the
             ConfigMap at that location; {} when the key is missing
    """
    data = graceful_chain_get(self.json, "data") or {}
    try:
        raw_value = data[name]
    except KeyError:
        return {}
    return json.loads(raw_value)
def get_error_message(self):
    """
    Return an error message based on atomic-reactor's metadata

    :return: str or None, "Error in plugin ..." when the metadata names a
             failing plugin; None when no error info can be extracted
    """
    # Best effort: any missing or malformed metadata means "no message".
    try:
        str_metadata = graceful_chain_get(self.get_annotations_or_labels(),
                                          "plugins-metadata")
        metadata_dict = json.loads(str_metadata)
        plugin, error_message = list(metadata_dict['errors'].items())[0]
        if not error_message:
            # Plugin failed without an error description
            return "Error in plugin %s" % plugin
        # Plugin has non-empty error description
        return "Error in plugin %s: %s" % (plugin, error_message)
    except Exception:
        return None
def get_data(self):
    """
    Find the data stored in the config_map

    :return: dict, the json of the data that was passed into the
             ConfigMap on creation; {} when there is no data
    """
    data = graceful_chain_get(self.json, "data")
    if data is None:
        return {}
    # each value is stored as a JSON string; decode all of them
    return {key: json.loads(value) for key, value in data.items()}
def wait_for_new_build_config_instance(self, build_config_id, prev_version):
    """
    Watch a build config until it spawns a build newer than prev_version.

    :param build_config_id: str, name of the build config
    :param prev_version: int, status.lastVersion before instantiation
    :return: str, name of the new build instance
             ("<build_config_id>-<version>")
    :raises OsbsResponseException: when no new instance appears
    """
    logger.info("waiting for build config %s to get instantiated",
                build_config_id)
    for changetype, obj in self.watch_resource("buildconfigs",
                                               build_config_id):
        if changetype == WATCH_MODIFIED:
            version = graceful_chain_get(obj, 'status', 'lastVersion')
            if not isinstance(version, numbers.Integral):
                logger.error("BuildConfig %s has unexpected lastVersion: %s",
                             build_config_id, version)
                continue

            if version > prev_version:
                return "%s-%s" % (build_config_id, version)

        if changetype == WATCH_DELETED:
            logger.error("BuildConfig deleted while waiting for new build "
                         "instance")
            break

    # Include the HTTP status code so callers can distinguish "not found",
    # consistent with the sibling implementation of this method.
    raise OsbsResponseException("New BuildConfig instance not found",
                                httplib.NOT_FOUND)
def restore_resource(self, resource_type, resources, continue_on_error=False):
    """
    Restore a collection of resources to the cluster.

    :param resource_type: str, e.g. 'buildconfigs'
    :param resources: dict, with an "items" list of resource definitions
    :param continue_on_error: bool, log-and-continue on per-resource
                              failure instead of raising
    """
    items = resources["items"]
    nfailed = 0
    for resource in items:
        name = (utils.graceful_chain_get(resource, 'metadata', 'name') or
                '(no name)')
        logger.debug("restoring %s/%s", resource_type, name)
        try:
            self._prepare_resource(resource_type, resource)
            self.os.restore_resource(resource_type, resource)
        except Exception:
            if not continue_on_error:
                raise
            logger.exception("failed to restore %s/%s", resource_type, name)
            nfailed += 1

    if continue_on_error:
        ntotal = len(items)
        logger.info("restored %s/%s %s", ntotal - nfailed, ntotal,
                    resource_type)
def wait_for_new_build_config_instance(self, build_config_id, prev_version):
    """
    Block until the named build config spawns a build newer than
    prev_version.

    :param build_config_id: str, name of the build config
    :param prev_version: int, status.lastVersion before instantiation
    :return: str, name of the new build instance
             ("<build_config_id>-<version>")
    :raises OsbsResponseException: when no new instance appears
    """
    logger.info("waiting for build config %s to get instantiated",
                build_config_id)
    for changetype, obj in self.watch_resource("buildconfigs",
                                               build_config_id):
        if changetype == WATCH_DELETED:
            logger.error("BuildConfig deleted while waiting for new build "
                         "instance")
            break

        if changetype != WATCH_MODIFIED:
            continue

        version = graceful_chain_get(obj, 'status', 'lastVersion')
        if not isinstance(version, numbers.Integral):
            logger.error("BuildConfig %s has unexpected lastVersion: %s",
                         build_config_id, version)
            continue

        if version > prev_version:
            return "%s-%s" % (build_config_id, version)

    raise OsbsResponseException("New BuildConfig instance not found",
                                httplib.NOT_FOUND)
def restore_resource(self, resource_type, resources, continue_on_error=False):
    """
    Restore a collection of resources to the cluster.

    :param resource_type: str, e.g. 'buildconfigs'
    :param resources: dict, with an "items" list of resource definitions
    :param continue_on_error: bool, log-and-continue on per-resource
                              failure instead of raising
    """
    nfailed = 0
    for r in resources["items"]:
        name = utils.graceful_chain_get(r, 'metadata', 'name') or '(no name)'
        logger.debug("restoring %s/%s", resource_type, name)
        try:
            # BUGFIX: pass resource_type through — _prepare_resource needs
            # it to decide which server-managed fields to strip (matches
            # the sibling implementation of this method).
            self._prepare_resource(resource_type, r)
            self.os.restore_resource(resource_type, r)
        except Exception:
            if continue_on_error:
                logger.exception("failed to restore %s/%s",
                                 resource_type, name)
                nfailed += 1
            else:
                raise

    if continue_on_error:
        ntotal = len(resources["items"])
        logger.info("restored %s/%s %s", ntotal - nfailed, ntotal,
                    resource_type)
def get_container_image_ids(self):
    """
    Find the image IDs the containers use.

    :return: dict, image tag to docker ID
    """
    statuses = graceful_chain_get(self.json, "status", "containerStatuses")
    if statuses is None:
        return {}

    def remove_prefix(image_id):
        # Strip any scheme prefix from the image ID.  BUGFIX: the old
        # startswith('docker://') check missed 'docker-pullable://',
        # which kubernetes also emits; match any '<scheme>://' instead.
        try:
            # Raises 'ValueError' if not found
            index = image_id.index('://')
            image_id = image_id[index + 3:]
        except ValueError:
            pass
        return image_id

    return {status['image']: remove_prefix(status['imageID'])
            for status in statuses}
def get_error_reason(self):
    """
    Describe why the build failed.

    :return: dict or None; {'plugin': [name, message]} when atomic-reactor
             metadata identifies the failing plugin, {'pod': reason}
             otherwise; None when nothing can be determined
    """
    str_metadata = graceful_chain_get(self.get_annotations(),
                                      "plugins-metadata")
    if str_metadata:
        try:
            errors = json.loads(str_metadata)['errors']
            plugin, error_message = list(errors.items())[0]
        except (ValueError, KeyError, IndexError):
            # malformed metadata; fall through to the pod-based reason
            pass
        else:
            return {'plugin': [plugin, error_message]}

    if not self.osbs:
        return {
            'pod': 'OSBS unavailable; Pod related errors cannot be retrieved'
        }

    try:
        pod = self.osbs.get_pod_for_build(self.get_build_name())
        return {'pod': pod.get_failure_reason()}
    except OsbsException:
        return None
def get_rpm_packages(self):
    """Return the "rpm-packages" entry from annotations/labels, or None."""
    metadata = self.get_annotations_or_labels()
    return graceful_chain_get(metadata, "rpm-packages")
def get_annotations(self):
    """Return the object's metadata.annotations mapping, or None."""
    annotations = graceful_chain_get(self.json, "metadata", "annotations")
    return annotations
def get_image_tag(self):
    """Return the output image tag (spec.output.to.name), or None."""
    tag = graceful_chain_get(self.json, "spec", "output", "to", "name")
    return tag
def get_tar_metadata_filename(self):
    """Return the "filename" field of the tar metadata, or None."""
    tar_metadata = self.get_tar_metadata()
    return graceful_chain_get(tar_metadata, "filename")
def get_tar_metadata_md5sum(self):
    """Return the "md5sum" field of the tar metadata, or None."""
    tar_metadata = self.get_tar_metadata()
    return graceful_chain_get(tar_metadata, "md5sum")
def get_tar_metadata(self):
    """Decode the "tar_metadata" JSON from annotations/labels, or None."""
    raw = graceful_chain_get(self.get_annotations_or_labels(), "tar_metadata")
    if not raw:
        return None
    return json.loads(raw)
def get_commit_id(self):
    """Return the "commit_id" entry from annotations/labels, or None."""
    metadata = self.get_annotations_or_labels()
    return graceful_chain_get(metadata, "commit_id")
def get_koji_build_id(self):
    """Return the "koji-build-id" label, or None."""
    labels = self.get_labels()
    return graceful_chain_get(labels, "koji-build-id")
def get_filesystem_koji_task_id(self):
    """Return the koji task id recorded by the add_filesystem plugin, or None."""
    plugin_output = self.workflow.prebuild_results.get(PLUGIN_ADD_FILESYSTEM_KEY)
    result = self.get_result(plugin_output)
    return graceful_chain_get(result, 'filesystem-koji-task-id')
def get_base_image_name(self):
    """Return the "base-image-name" entry from annotations/labels, or None."""
    metadata = self.get_annotations_or_labels()
    return graceful_chain_get(metadata, "base-image-name")
def get_image_id(self):
    """Return the "image-id" entry from annotations/labels, or None."""
    metadata = self.get_annotations_or_labels()
    return graceful_chain_get(metadata, "image-id")
def get_tar_metadata_sha256sum(self):
    """Return the "sha256sum" field of the tar metadata, or None."""
    tar_metadata = self.get_tar_metadata()
    return graceful_chain_get(tar_metadata, "sha256sum")
def get_tar_metadata_size(self):
    """Return the "size" field of the tar metadata, or None."""
    tar_metadata = self.get_tar_metadata()
    return graceful_chain_get(tar_metadata, "size")
def get_build_name(self):
    """Return the build's metadata.name, or None."""
    name = graceful_chain_get(self.json, "metadata", "name")
    return name
def get_repositories(self):
    """Decode the "repositories" JSON from annotations/labels, or None."""
    raw = graceful_chain_get(self.get_annotations_or_labels(), "repositories")
    if not raw:
        return None
    return json.loads(raw)
def get_time_created(self):
    """Return the object's metadata.creationTimestamp, or None."""
    created = graceful_chain_get(self.json, "metadata", "creationTimestamp")
    return created
def get_labels(self):
    """Return the object's metadata.labels mapping, or None."""
    labels = graceful_chain_get(self.json, "metadata", "labels")
    return labels
def get_dockerfile(self):
    """Return the "dockerfile" entry from annotations/labels, or None."""
    metadata = self.get_annotations_or_labels()
    return graceful_chain_get(metadata, "dockerfile")
def get_digests(self):
    """Decode the "digests" JSON from annotations/labels, or None."""
    raw = graceful_chain_get(self.get_annotations_or_labels(), "digests")
    if not raw:
        return None
    return json.loads(raw)