def add(self, build):
    try:
        annotations = build['metadata']['annotations']
        base_image_name = annotations['base-image-name']
        repositories = json.loads(annotations['repositories'])
        when = build['status']['startTimestamp']
        # 'duration' is reported in nanoseconds; convert to seconds.
        duration = int(build['status']['duration']) / (10 ** 9)
        upload_size, layer_size = self._get_upload_size(build)
    except (KeyError, IndexError):
        # Skip builds that lack the expected metadata.
        return

    repos = {strip_registry_from_image(repo)
             for repo in repositories['primary']}
    # Only account for repositories we have not seen before.
    duplicates = self.seen.intersection(repos)
    repos -= duplicates
    self.seen.update(repos)
    self.deps[strip_registry_from_image(base_image_name)].update(repos)

    if repos:
        # Record sizes and duration under one representative tag
        # (sets are unordered, so the choice is arbitrary).
        first_tag = next(iter(repos))
        self.upload_size[first_tag] = upload_size
        self.layer_size[first_tag] = layer_size
        self.duration[first_tag] = duration

    for repo in repos:
        self.when[repo] = when
        self.tags_aliases[repo] = first_tag
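# The add() variants and _get_upload_size() below assume a collector object
# whose state is initialized elsewhere. A minimal sketch of the assumed
# constructor follows; the class name is hypothetical and the attribute names
# are inferred from their usage in this file, not confirmed by the source.
from collections import defaultdict

class BuildStatsCollector(object):  # hypothetical name
    def __init__(self, pulp_base_url=None):
        self.seen = set()                 # primary repos already accounted for
        self.deps = defaultdict(set)      # base image -> set of derived repos
        self.when = {}                    # repo -> build start timestamp
        self.upload_size = {}             # first tag -> bytes uploaded
        self.layer_size = {}              # first tag -> size of the top layer
        self.duration = {}                # first tag -> build duration (s)
        self.tags_aliases = {}            # repo -> representative first tag
        self.pulp_base_url = pulp_base_url
        self.pulp_upload_size = {}        # image_id -> cached size
        self.known_pulp_layers = {}       # layer_id -> image name
        self.found_image_name_sizes = {}  # image name -> layer size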
def cmd_list_builds(args, osbs):
    kwargs = {}
    if args.running:
        kwargs['running'] = args.running

    if args.from_json:
        with open(args.from_json) as fp:
            builds = [BuildResponse(build) for build in json.load(fp)]
    else:
        builds = osbs.list_builds(**kwargs)

    if args.output == 'json':
        json_output = []
        for build in builds:
            json_output.append(build.json)
        print_json_nicely(json_output)
    elif args.output == 'text':
        if args.columns:
            cols_to_display = args.columns.split(",")
        else:
            cols_to_display = CLI_LIST_BUILDS_DEFAULT_COLS

        # The first row serves as the table header.
        data = [{
            "base_image": "BASE IMAGE NAME",
            "base_image_id": "BASE IMAGE ID",
            "commit": "COMMIT",
            "image": "IMAGE NAME",
            "unique_image": "UNIQUE IMAGE NAME",
            "image_id": "IMAGE ID",
            "koji_build_id": "KOJI BUILD ID",
            "name": "BUILD ID",
            "status": "STATUS",
            "time_created": "TIME CREATED",
        }]
        for build in sorted(builds,
                            key=lambda x: x.get_time_created_in_seconds()):
            unique_image = build.get_image_tag()
            try:
                image = strip_registry_from_image(
                    build.get_repositories()["primary"][0])
            except (TypeError, KeyError, IndexError):
                image = ""  # "" or unique_image? failed builds don't have that

            if args.FILTER and args.FILTER not in image:
                continue
            if args.running and not build.is_in_progress():
                continue

            b = {
                "base_image": build.get_base_image_name() or '',
                "base_image_id": build.get_base_image_id() or '',
                "commit": build.get_commit_id(),
                "image": image,
                "unique_image": unique_image,
                "image_id": build.get_image_id() or '',
                "koji_build_id": build.get_koji_build_id() or '',
                "name": build.get_build_name(),
                "status": build.status,
                "time_created": build.get_time_created(),
            }
            data.append(b)
        tp = TablePrinter(data, cols_to_display)
        tp.render()
def cmd_list_builds(args, osbs):
    kwargs = {}
    if args.running:
        # Select only builds that are not in a finished state.
        field_selector = ",".join(
            "status!={status}".format(status=status.capitalize())
            for status in BUILD_FINISHED_STATES
        )
        kwargs['field_selector'] = field_selector

    if args.from_json:
        with open(args.from_json) as fp:
            builds = [BuildResponse(build) for build in json.load(fp)]
    else:
        builds = osbs.list_builds(**kwargs)

    if args.output == 'json':
        json_output = []
        for build in builds:
            json_output.append(build.json)
        print_json_nicely(json_output)
    elif args.output == 'text':
        if args.columns:
            cols_to_display = args.columns.split(",")
        else:
            cols_to_display = CLI_LIST_BUILDS_DEFAULT_COLS

        # The first row serves as the table header.
        data = [{
            "base_image": "BASE IMAGE NAME",
            "base_image_id": "BASE IMAGE ID",
            "commit": "COMMIT",
            "image": "IMAGE NAME",
            "unique_image": "UNIQUE IMAGE NAME",
            "image_id": "IMAGE ID",
            "name": "BUILD ID",
            "status": "STATUS",
            "time_created": "TIME CREATED",
        }]
        for build in sorted(builds,
                            key=lambda x: x.get_time_created_in_seconds()):
            unique_image = build.get_image_tag()
            try:
                image = strip_registry_from_image(
                    build.get_repositories()["primary"][0])
            except (TypeError, KeyError, IndexError):
                image = ""  # "" or unique_image? failed builds don't have that

            if args.FILTER and args.FILTER not in image:
                continue
            if args.running and not build.is_in_progress():
                continue

            b = {
                "base_image": build.get_base_image_name() or '',
                "base_image_id": build.get_base_image_id() or '',
                "commit": build.get_commit_id(),
                "image": image,
                "unique_image": unique_image,
                "image_id": build.get_image_id() or '',
                "name": build.get_build_name(),
                "status": build.status,
                "time_created": build.get_time_created(),
            }
            data.append(b)
        tp = TablePrinter(data, cols_to_display)
        tp.render()
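# For illustration of the field selector built above: assuming
# BUILD_FINISHED_STATES holds lowercase state names such as
# ('complete', 'failed', 'cancelled') (an assumption about the constant,
# which is not shown in this snippet), the join expands to:
BUILD_FINISHED_STATES = ('complete', 'failed', 'cancelled')  # assumed values
field_selector = ",".join(
    "status!={status}".format(status=s.capitalize())
    for s in BUILD_FINISHED_STATES
)
# -> 'status!=Complete,status!=Failed,status!=Cancelled'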
def add(self, build):
    try:
        annotations = build['metadata']['annotations']
        base_image_name = annotations['base-image-name']
        repositories = json.loads(annotations['repositories'])
        when = build['status']['startTimestamp']
    except KeyError:
        # Skip builds that lack the expected metadata.
        return

    repos = {strip_registry_from_image(repo)
             for repo in repositories['primary']}
    duplicates = self.seen.intersection(repos)
    repos -= duplicates
    self.seen.update(repos)
    self.deps[strip_registry_from_image(base_image_name)].update(repos)
    for repo in repos:
        self.when[repo] = when
def test_strip_registry_from_image(img, expected):
    assert strip_registry_from_image(img) == expected
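# Since the test above takes img/expected parameters, it is presumably driven
# by a pytest parametrize decorator. A minimal sketch of how it could be wired
# up; the cases are hypothetical and the exact expected values depend on
# strip_registry_from_image's real behavior, which this snippet does not show.
import pytest

@pytest.mark.parametrize(('img', 'expected'), [
    # hypothetical cases, not taken from the source
    ('registry.example.com/foo/bar:latest', 'foo/bar:latest'),
    ('foo/bar:latest', 'foo/bar:latest'),
])
def test_strip_registry_from_image(img, expected):
    assert strip_registry_from_image(img) == expected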
def _get_upload_size(self, build):
    annotations = build['metadata']['annotations']
    image_id = annotations['image-id']
    if image_id in self.pulp_upload_size:
        # Size already computed for this image; nothing new was uploaded.
        return (0, self.pulp_upload_size[image_id])

    repos_json = json.loads(annotations['repositories'])
    unique_repos = repos_json['unique']
    if not unique_repos:
        self.pulp_upload_size[image_id] = 0
        return (0, 0)

    # Rebuild the "namespace/name:version-release" form of the primary repo.
    repo = repos_json['primary'][0]
    namespace = repo.split('/')[-2]
    name_and_tag = repo.split('/')[-1]
    name = name_and_tag.split(':')[0]
    tag = name_and_tag.split(':')[1]
    version = tag.split('-')[0]
    release = '-'.join(tag.split('-')[1:])
    expected_image_name = "%s/%s:%s-%s" % (namespace, name, version, release)

    full_image_name = unique_repos[0]
    image_name = '-'.join(strip_registry_from_image(full_image_name).split('/'))

    if not self.pulp_base_url:
        # Base URL for pulp is not specified - fall back to the old method
        # of reading the size from the build's tar metadata.
        try:
            size = json.loads(annotations['tar_metadata'])['size']
            return (size, size)
        except Exception:
            return (0, 0)

    pulp_repo_url = '%s/pulp/docker/v1/redhat-%s' % (self.pulp_base_url,
                                                     image_name.split(':')[0])

    if image_id in self.known_pulp_layers:
        size = self.found_image_name_sizes[expected_image_name]
        self.pulp_upload_size[image_id] = size
        sys.stderr.write(" found cached size for %s: %s\n" % (image_id, size))
        (parent_layer, layer_size,
         image_name) = self._get_layer_info(image_id, pulp_repo_url)
        total_size = self._get_size_with_parent_layers(image_name, parent_layer)
        return (size, total_size)

    pulp_url = '%s/%s/json' % (pulp_repo_url, image_id)
    sys.stderr.write("Looking for image size at %s\n" % pulp_url)
    try:
        # verify=False: skip TLS verification for internal pulp certificates.
        r = requests.get(pulp_url, verify=False)
        r.raise_for_status()
        size = r.json()['Size']
        self.pulp_upload_size[image_id] = size
        (parent_layer, layer_size,
         image_name) = self._get_layer_info(image_id, pulp_repo_url)
        total_size = self._get_size_with_parent_layers(image_name, parent_layer)
        return (total_size, size)
    except requests.HTTPError:
        try:
            sys.stderr.write(" looking for layers at %s with image_name %s\n" %
                             (pulp_repo_url, expected_image_name))
            # Wrong image_id: go to the repo's page and check every layer id.
            r = requests.get(pulp_repo_url, verify=False)
            matches = re.findall(r'href="(.+)/"', r.text)
            sys.stderr.write(" found %s layers\n" % len(matches))
            # Skip the header link.
            for layer_id in matches[1:]:
                (parent_layer, layer_size, image_name) = self._get_layer_info(
                    layer_id, pulp_repo_url)
                self.found_image_name_sizes[image_name] = layer_size
                self.known_pulp_layers[layer_id] = image_name
                if image_name != expected_image_name:
                    sys.stderr.write(" %s != %s\n" % (image_name,
                                                      expected_image_name))
                    continue
                sys.stderr.write(" found layer with size %s\n" % layer_size)
                parent_layer_size = self._get_size_with_parent_layers(
                    image_name, parent_layer)
                sys.stderr.write(" parent's layer size: %s\n" %
                                 parent_layer_size)
                total_size = layer_size + parent_layer_size
                self.pulp_upload_size[image_id] = total_size
                return (total_size, layer_size)
            else:
                # for/else: reached only when no layer matched.
                raise RuntimeError("No matching layer found")
        except Exception as e:
            sys.stderr.write(" cannot find layer info at this url: %s\n" %
                             repr(e))
            self.pulp_upload_size[image_id] = 0
            return (0, 0)
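# _get_layer_info() and _get_size_with_parent_layers() are not shown in this
# snippet. Below is a hedged sketch of what the latter might look like,
# assuming the repo serves the docker v1 static layout ('<layer_id>/json'
# with 'Size' and 'parent' fields) that _get_upload_size() above already
# relies on; the real helper may differ.
def _get_size_with_parent_layers(self, image_name, parent_layer):
    # Walk the 'parent' chain through the same v1 /json endpoints,
    # summing each ancestor's 'Size'. URL scheme mirrors the one used
    # in _get_upload_size() above.
    repo_url = '%s/pulp/docker/v1/redhat-%s' % (self.pulp_base_url,
                                                image_name.split(':')[0])
    total = 0
    layer_id = parent_layer
    while layer_id:
        r = requests.get('%s/%s/json' % (repo_url, layer_id), verify=False)
        r.raise_for_status()
        metadata = r.json()
        total += metadata.get('Size', 0)
        layer_id = metadata.get('parent')
    return total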