def get(self, parsed_args, page_token):
    """
    List the usage logs for the current system.

    Returns a (serialized page, next page token) tuple; raises
    Unauthorized unless the caller is a superuser.
    """
    # Only superusers may view system-wide usage logs.
    if SuperUserPermission().can():
        start_time = parsed_args["starttime"]
        end_time = parsed_args["endtime"]
        # Normalizes/validates the requested time window before lookup.
        (start_time, end_time) = _validate_logs_arguments(start_time, end_time)

        log_entry_page = logs_model.lookup_logs(start_time, end_time, page_token=page_token)

        return (
            {
                "start_time": format_date(start_time),
                "end_time": format_date(end_time),
                # System-wide view: always include the namespace on each entry.
                "logs": [
                    log.to_dict(avatar, include_namespace=True) for log in log_entry_page.logs
                ],
            },
            log_entry_page.next_page_token,
        )

    raise Unauthorized()
def _tag_dict(tag): tag_info = { "name": tag.name, "reversion": tag.reversion, } if tag.lifetime_start_ts and tag.lifetime_start_ts > 0: tag_info["start_ts"] = tag.lifetime_start_ts if tag.lifetime_end_ts and tag.lifetime_end_ts > 0: tag_info["end_ts"] = tag.lifetime_end_ts # TODO: Remove this once fully on OCI data model. if tag.legacy_image_if_present: tag_info["docker_image_id"] = tag.legacy_image.docker_image_id tag_info["image_id"] = tag.legacy_image.docker_image_id tag_info["size"] = tag.legacy_image.aggregate_size # TODO: Remove this check once fully on OCI data model. if tag.manifest_digest: tag_info["manifest_digest"] = tag.manifest_digest if tag.manifest: tag_info["is_manifest_list"] = tag.manifest.is_manifest_list if tag.lifetime_start_ts and tag.lifetime_start_ts > 0: last_modified = format_date(datetime.utcfromtimestamp(tag.lifetime_start_ts)) tag_info["last_modified"] = last_modified if tag.lifetime_end_ts is not None: expiration = format_date(datetime.utcfromtimestamp(tag.lifetime_end_ts)) tag_info["expiration"] = expiration return tag_info
def _tag_dict(tag): tag_info = { "name": tag.name, "reversion": tag.reversion, } if tag.lifetime_start_ts and tag.lifetime_start_ts > 0: tag_info["start_ts"] = tag.lifetime_start_ts if tag.lifetime_end_ts and tag.lifetime_end_ts > 0: tag_info["end_ts"] = tag.lifetime_end_ts tag_info["manifest_digest"] = tag.manifest_digest tag_info["is_manifest_list"] = tag.manifest.is_manifest_list tag_info["size"] = tag.manifest_layers_size if tag.lifetime_start_ts and tag.lifetime_start_ts > 0: last_modified = format_date(datetime.utcfromtimestamp(tag.lifetime_start_ts)) tag_info["last_modified"] = last_modified if tag.lifetime_end_ts is not None: expiration = format_date(datetime.utcfromtimestamp(tag.lifetime_end_ts)) tag_info["expiration"] = expiration return tag_info
def _get_logs(
    start_time,
    end_time,
    performer_name=None,
    repository_name=None,
    namespace_name=None,
    page_token=None,
    filter_kinds=None,
):
    """
    Look up one page of action logs over the validated time window and
    serialize it, returning a (page dict, next page token) tuple.
    """
    start_time, end_time = _validate_logs_arguments(start_time, end_time)

    page = logs_model.lookup_logs(
        start_time,
        end_time,
        performer_name,
        repository_name,
        namespace_name,
        filter_kinds,
        page_token,
        app.config["ACTION_LOG_MAX_PAGE"],
    )

    # Namespace info is redundant when the caller already scoped the query
    # to a namespace or repository.
    include_namespace = namespace_name is None and repository_name is None

    serialized = {
        "start_time": format_date(start_time),
        "end_time": format_date(end_time),
        "logs": [entry.to_dict(avatar, include_namespace) for entry in page.logs],
    }
    return serialized, page.next_page_token
def token_view(token, include_code=False):
    """Serialize an app-specific token; include the full token code on request."""
    view = {
        "uuid": token.uuid,
        "title": token.title,
        "last_accessed": format_date(token.last_accessed),
        "created": format_date(token.created),
        "expiration": format_date(token.expiration),
    }
    if include_code:
        view["token_code"] = model.appspecifictoken.get_full_token_string(token)
    return view
def image_dict(image, with_history=False, with_tags=False):
    """Serialize a legacy image into its API dictionary form."""
    # `command` is stored as a JSON string; surface a parse-error marker
    # instead of failing the whole serialization.
    command = None
    if image.command:
        try:
            command = json.loads(image.command)
        except (ValueError, TypeError):
            command = {"error": "Could not parse command"}

    data = {
        "id": image.docker_image_id,
        "created": format_date(image.created),
        "comment": image.comment,
        "command": command,
        "size": image.image_size,
        "uploading": image.uploading,
        "sort_index": len(image.parents),
    }

    if with_tags:
        data["tags"] = [t.name for t in image.tags]
    if with_history:
        data["history"] = [image_dict(p) for p in image.parents]

    # The ancestors string uses the docker IDs rather than the DB IDs.
    data["ancestors"] = "/{0}/".format("/".join(p.docker_image_id for p in image.parents))
    return data
def _manifest_dict(manifest):
    """
    Serialize a manifest (digest, raw manifest bytes, layers, and a
    back-compat synthetic image entry) into its API dictionary form.
    """
    layers = None
    if not manifest.is_manifest_list:
        # Manifest lists have no single layer set; only load layers for
        # concrete manifests, and 404 if they cannot be found.
        layers = registry_model.list_manifest_layers(manifest, storage)
        if layers is None:
            logger.debug("Missing layers for manifest `%s`", manifest.digest)
            abort(404)

    image = None
    if manifest.legacy_image_root_id:
        # NOTE: This is replicating our older response for this endpoint, but
        # returns empty for the metadata fields. This is to ensure back-compat
        # for callers still using the deprecated API.
        image = {
            "id": manifest.legacy_image_root_id,
            "created": format_date(datetime.utcnow()),
            "comment": "",
            "command": "",
            "size": 0,
            "uploading": False,
            "sort_index": 0,
            "ancestors": "",
        }

    return {
        "digest": manifest.digest,
        "is_manifest_list": manifest.is_manifest_list,
        "manifest_data": manifest.internal_manifest_bytes.as_unicode(),
        "config_media_type": manifest.config_media_type,
        # Layers are None for manifest lists (see above).
        "layers": (
            [_layer_dict(lyr.layer_info, idx) for idx, lyr in enumerate(layers)]
            if layers
            else None
        ),
        "image": image,
    }
def to_dict(self, include_token=False):
    """
    Serialize this entity (name, timestamps, teams, repositories) for the
    API; the credential is only included when explicitly requested.
    """
    serialized = {
        "name": self.name,
        "created": None if self.created is None else format_date(self.created),
        "last_accessed": None if self.last_accessed is None else format_date(self.last_accessed),
        "teams": [member.to_dict() for member in self.teams],
        "repositories": self.repository_names,
        "description": self.description,
    }
    if include_token:
        serialized["token"] = self.password
    return serialized
def token_view(token, include_code=False):
    """Serialize an app-specific token; optionally expose the full token code."""
    view = {
        "uuid": token.uuid,
        "title": token.title,
        "last_accessed": format_date(token.last_accessed),
        "created": format_date(token.created),
        "expiration": format_date(token.expiration),
    }
    if include_code:
        view["token_code"] = model.appspecifictoken.get_full_token_string(token)
    return view
def image_dict(image, with_history=False, with_tags=False):
    """Serialize a legacy image into its API dictionary form."""
    # `command` is stored as a JSON string; surface a parse-error marker
    # instead of failing the whole serialization.
    command = None
    if image.command:
        try:
            command = json.loads(image.command)
        except (ValueError, TypeError):
            command = {'error': 'Could not parse command'}

    data = {
        'id': image.docker_image_id,
        'created': format_date(image.created),
        'comment': image.comment,
        'command': command,
        'size': image.image_size,
        'uploading': image.uploading,
        'sort_index': len(image.parents),
    }

    if with_tags:
        data['tags'] = [t.name for t in image.tags]
    if with_history:
        data['history'] = [image_dict(p) for p in image.parents]

    # The ancestors string uses the docker IDs rather than the DB IDs.
    data['ancestors'] = '/{0}/'.format('/'.join(p.docker_image_id for p in image.parents))
    return data
def to_dict(self, include_metadata=False, include_token=False):
    """
    Serialize this entity for the API; the credential and unstructured
    metadata are only included when explicitly requested.
    """
    serialized = {
        "name": self.name,
        "created": None if self.created is None else format_date(self.created),
        "last_accessed": None if self.last_accessed is None else format_date(self.last_accessed),
        "description": self.description,
    }
    if include_token:
        serialized["token"] = self.password
    if include_metadata:
        serialized["unstructured_metadata"] = self.unstructured_metadata
    return serialized
def build_status_view(build_obj):
    """
    Serialize a repository build (phase, job config, trigger, and
    permission-dependent fields) into the API response dictionary.
    """
    phase, status, error = _get_build_status(build_obj)
    repo_namespace = build_obj.repository.namespace_user.username
    repo_name = build_obj.repository.name

    can_read = ReadRepositoryPermission(repo_namespace, repo_name).can()
    can_write = ModifyRepositoryPermission(repo_namespace, repo_name).can()
    can_admin = AdministerRepositoryPermission(repo_namespace, repo_name).can()

    job_config = get_job_config(build_obj)

    resp = {
        'id': build_obj.uuid,
        'phase': phase,
        'started': format_date(build_obj.started),
        'display_name': build_obj.display_name,
        'status': status or {},
        'subdirectory': job_config.get('build_subdir', ''),
        # Same value as 'subdirectory'; kept for backward compatibility.
        'dockerfile_path': job_config.get('build_subdir', ''),
        'context': job_config.get('context', ''),
        'tags': job_config.get('docker_tags', []),
        'manual_user': job_config.get('manual_user', None),
        'is_writer': can_write,
        'trigger': trigger_view(build_obj.trigger, can_read, can_admin, for_build=True),
        # Trigger metadata is only exposed to callers with read permission.
        'trigger_metadata': job_config.get('trigger_metadata', None) if can_read else None,
        'resource_key': build_obj.resource_key,
        'pull_robot': user_view(build_obj.pull_robot) if build_obj.pull_robot else None,
        'repository': {
            'namespace': repo_namespace,
            'name': repo_name
        },
        'error': error,
    }

    # The uploaded-build archive URL is restricted to writers (or to all
    # readers when the READER_BUILD_LOGS feature is enabled).
    if can_write or features.READER_BUILD_LOGS:
        if build_obj.resource_key is not None:
            resp['archive_url'] = user_files.get_file_url(
                build_obj.resource_key, get_request_ip(), requires_cors=True)
        elif job_config.get('archive_url', None):
            resp['archive_url'] = job_config['archive_url']

    return resp
def build_status_view(build_obj):
    """
    Serialize a repository build (phase, job config, trigger, and
    permission-dependent fields) into the API response dictionary.
    """
    phase, status, error = _get_build_status(build_obj)
    repo_namespace = build_obj.repository.namespace_user.username
    repo_name = build_obj.repository.name

    can_read = ReadRepositoryPermission(repo_namespace, repo_name).can()
    can_write = ModifyRepositoryPermission(repo_namespace, repo_name).can()
    can_admin = AdministerRepositoryPermission(repo_namespace, repo_name).can()

    job_config = get_job_config(build_obj)

    resp = {
        "id": build_obj.uuid,
        "phase": phase,
        "started": format_date(build_obj.started),
        "display_name": build_obj.display_name,
        "status": status or {},
        "subdirectory": job_config.get("build_subdir", ""),
        # Same value as "subdirectory"; kept for backward compatibility.
        "dockerfile_path": job_config.get("build_subdir", ""),
        "context": job_config.get("context", ""),
        "tags": job_config.get("docker_tags", []),
        "manual_user": job_config.get("manual_user", None),
        "is_writer": can_write,
        "trigger": trigger_view(build_obj.trigger, can_read, can_admin, for_build=True),
        # Trigger metadata is only exposed to callers with read permission.
        "trigger_metadata": job_config.get("trigger_metadata", None) if can_read else None,
        "resource_key": build_obj.resource_key,
        "pull_robot": user_view(build_obj.pull_robot) if build_obj.pull_robot else None,
        "repository": {
            "namespace": repo_namespace,
            "name": repo_name
        },
        "error": error,
    }

    # The uploaded-build archive URL is restricted to writers (or to all
    # readers when the READER_BUILD_LOGS feature is enabled).
    if can_write or features.READER_BUILD_LOGS:
        if build_obj.resource_key is not None:
            resp["archive_url"] = user_files.get_file_url(
                build_obj.resource_key, get_request_ip(), requires_cors=True)
        elif job_config.get("archive_url", None):
            resp["archive_url"] = job_config["archive_url"]

    return resp
def notification_view(note):
    """Serialize a notification row into its API dictionary form."""
    view = {"id": note.uuid}
    # Only notifications targeting an organization expose the target name.
    view["organization"] = note.target.username if note.target.organization else None
    view["kind"] = note.kind.name
    view["created"] = format_date(note.created)
    view["metadata"] = json.loads(note.metadata_json)
    view["dismissed"] = note.dismissed
    return view
def get(self, orgname, teamname, parsed_args):
    """
    Retrieve the list of members for the specified team.
    """
    view_permission = ViewTeamPermission(orgname, teamname)
    edit_permission = AdministerOrganizationPermission(orgname)

    if view_permission.can():
        team = None
        try:
            team = model.team.get_organization_team(orgname, teamname)
        except model.InvalidTeamException:
            raise NotFound()

        members = model.organization.get_organization_team_members(team.id)

        # Pending invites are only visible to org admins, and only when
        # explicitly requested.
        invites = []
        if parsed_args["includePending"] and edit_permission.can():
            invites = model.team.get_organization_team_member_invites(team.id)

        data = {
            "name": teamname,
            "members": [member_view(m) for m in members] + [invite_view(i) for i in invites],
            "can_edit": edit_permission.can(),
        }

        if features.TEAM_SYNCING and authentication.federated_service:
            if _syncing_setup_allowed(orgname):
                data["can_sync"] = {
                    "service": authentication.federated_service,
                }
                data["can_sync"].update(authentication.service_metadata())

            sync_info = model.team.get_team_sync_information(orgname, teamname)
            if sync_info is not None:
                data["synced"] = {
                    "service": sync_info.service.name,
                }
                # Sync configuration details are restricted to superusers.
                if SuperUserPermission().can():
                    data["synced"].update({
                        "last_updated": format_date(sync_info.last_updated),
                        "config": json.loads(sync_info.config),
                    })

        return data

    raise Unauthorized()
def to_dict(self):
    """Serialize this release view into its API dictionary form."""
    # lifetime_start is divided by 1000 before conversion — presumably
    # stored in milliseconds (matches the sibling serializer).
    modified = datetime.fromtimestamp(self.lifetime_start // 1000)
    return {
        "name": self.name,
        "last_modified": format_date(modified),
        "channels": self.releases_channels_map[self.name],
    }
def to_dict(self):
    """Serialize this channel view into its API dictionary form."""
    # linked_tag_lifetime_start is divided by 1000 before conversion —
    # presumably stored in milliseconds.
    modified = datetime.fromtimestamp(self.linked_tag_lifetime_start / 1000)
    return {
        "name": self.name,
        "release": self.linked_tag_name,
        "last_modified": format_date(modified),
    }
def to_dict(self):
    """Serialize this tag view into its API dictionary form."""
    info = {
        "name": self.name,
        "image_id": self.image_docker_image_id,
        "size": self.image_aggregate_size,
    }
    if self.lifetime_start_ts > 0:
        info["last_modified"] = format_date(datetime.fromtimestamp(self.lifetime_start_ts))
    if self.lifetime_end_ts:
        info["expiration"] = format_date(datetime.fromtimestamp(self.lifetime_end_ts))
    if self.tag_manifest_digest is not None:
        info["manifest_digest"] = self.tag_manifest_digest
    return info
def notification_view(note):
    """Serialize a notification row into its API dictionary form."""
    view = {'id': note.uuid}
    # Only notifications targeting an organization expose the target name.
    view['organization'] = note.target.username if note.target.organization else None
    view['kind'] = note.kind.name
    view['created'] = format_date(note.created)
    view['metadata'] = json.loads(note.metadata_json)
    view['dismissed'] = note.dismissed
    return view
def to_dict(self):
    """Serialize this tag view into its API dictionary form."""
    info = {
        "name": self.name,
        "size": self.image_aggregate_size,
    }
    if self.lifetime_start_ts > 0:
        info["last_modified"] = format_date(datetime.fromtimestamp(self.lifetime_start_ts))
    if self.lifetime_end_ts:
        info["expiration"] = format_date(datetime.fromtimestamp(self.lifetime_end_ts))
    if self.tag_manifest_digest is not None:
        info["manifest_digest"] = self.tag_manifest_digest
    return info
def to_dict(self, include_metadata=False, include_token=False):
    """
    Serialize this entity for the API; the credential and unstructured
    metadata are only included when explicitly requested.
    """
    serialized = {
        'name': self.name,
        'created': None if self.created is None else format_date(self.created),
        'last_accessed': None if self.last_accessed is None else format_date(self.last_accessed),
        'description': self.description,
    }
    if include_token:
        serialized['token'] = self.password
    if include_metadata:
        serialized['unstructured_metadata'] = self.unstructured_metadata
    return serialized
def to_dict(self, include_token=False):
    """
    Serialize this entity (name, timestamps, teams, repositories) for the
    API; the credential is only included when explicitly requested.
    """
    serialized = {
        'name': self.name,
        'created': None if self.created is None else format_date(self.created),
        'last_accessed': None if self.last_accessed is None else format_date(self.last_accessed),
        'teams': [member.to_dict() for member in self.teams],
        'repositories': self.repository_names,
        'description': self.description,
    }
    if include_token:
        serialized['token'] = self.password
    return serialized
def to_dict(self):
    """
    Serialize this build view into its API dictionary form.

    The archive URL is only included when the viewer has write permission.
    """
    resp = {
        "id": self.uuid,
        "phase": self.phase,
        "started": format_date(self.started),
        "display_name": self.display_name,
        "status": self.status or {},
        "subdirectory": self.job_config.get("build_subdir", ""),
        # Same value as "subdirectory"; kept for backward compatibility.
        "dockerfile_path": self.job_config.get("build_subdir", ""),
        "context": self.job_config.get("context", ""),
        "tags": self.job_config.get("docker_tags", []),
        "manual_user": self.job_config.get("manual_user", None),
        "is_writer": self.can_write,
        "trigger": self.trigger.to_dict(),
        # Trigger metadata is only exposed to callers with read permission.
        "trigger_metadata": self.job_config.get("trigger_metadata", None) if self.can_read else None,
        "resource_key": self.resource_key,
        "pull_robot": user_view(self.pull_robot) if self.pull_robot else None,
        "repository": {
            "namespace": self.repository_namespace_user_username,
            "name": self.repository_name,
        },
        "error": self.error,
    }

    if self.can_write:
        if self.resource_key is not None:
            resp["archive_url"] = self.archive_url
        elif self.job_config.get("archive_url", None):
            resp["archive_url"] = self.job_config["archive_url"]

    return resp
def to_dict(self):
    """
    Serialize this build view into its API dictionary form.

    The archive URL is only included when the viewer has write permission.
    """
    resp = {
        'id': self.uuid,
        'phase': self.phase,
        'started': format_date(self.started),
        'display_name': self.display_name,
        'status': self.status or {},
        'subdirectory': self.job_config.get('build_subdir', ''),
        # Same value as 'subdirectory'; kept for backward compatibility.
        'dockerfile_path': self.job_config.get('build_subdir', ''),
        'context': self.job_config.get('context', ''),
        'tags': self.job_config.get('docker_tags', []),
        'manual_user': self.job_config.get('manual_user', None),
        'is_writer': self.can_write,
        'trigger': self.trigger.to_dict(),
        # Trigger metadata is only exposed to callers with read permission.
        'trigger_metadata': self.job_config.get('trigger_metadata', None) if self.can_read else None,
        'resource_key': self.resource_key,
        'pull_robot': user_view(self.pull_robot) if self.pull_robot else None,
        'repository': {
            'namespace': self.repository_namespace_user_username,
            'name': self.repository_name
        },
        'error': self.error,
    }

    if self.can_write:
        if self.resource_key is not None:
            resp['archive_url'] = self.archive_url
        elif self.job_config.get('archive_url', None):
            resp['archive_url'] = self.job_config['archive_url']

    return resp
def _tag_dict(tag): tag_info = { 'name': tag.name, 'reversion': tag.reversion, } if tag.lifetime_start_ts > 0: tag_info['start_ts'] = tag.lifetime_start_ts if tag.lifetime_end_ts > 0: tag_info['end_ts'] = tag.lifetime_end_ts # TODO: Remove this once fully on OCI data model. if tag.legacy_image_if_present: tag_info['docker_image_id'] = tag.legacy_image.docker_image_id tag_info['image_id'] = tag.legacy_image.docker_image_id tag_info['size'] = tag.legacy_image.aggregate_size # TODO: Remove this check once fully on OCI data model. if tag.manifest_digest: tag_info['manifest_digest'] = tag.manifest_digest if tag.manifest: tag_info['is_manifest_list'] = tag.manifest.is_manifest_list if tag.lifetime_start_ts > 0: last_modified = format_date( datetime.utcfromtimestamp(tag.lifetime_start_ts)) tag_info['last_modified'] = last_modified if tag.lifetime_end_ts is not None: expiration = format_date(datetime.utcfromtimestamp( tag.lifetime_end_ts)) tag_info['expiration'] = expiration return tag_info
def get(self, namespace, repository, tag, parsed_args):
    """
    List the images for the specified repository tag.
    """
    repo_ref = registry_model.lookup_repository(namespace, repository)
    if repo_ref is None:
        raise NotFound()

    tag_ref = registry_model.get_repo_tag(repo_ref, tag)
    if tag_ref is None:
        raise NotFound()

    if parsed_args["owned"]:
        # NOTE: This is deprecated, so we just return empty now.
        return {"images": []}

    manifest = registry_model.get_manifest_for_tag(tag_ref)
    if manifest is None:
        raise NotFound()

    legacy_image = registry_model.get_legacy_image(
        repo_ref, manifest.legacy_image_root_id, storage
    )
    if legacy_image is None:
        raise NotFound()

    # NOTE: This is replicating our older response for this endpoint, but
    # returns empty for the metadata fields. This is to ensure back-compat
    # for callers still using the deprecated API, while not having to load
    # all the manifests from storage.
    return {
        "images": [
            {
                "id": image_id,
                # All synthesized entries share the tag's start timestamp.
                "created": format_date(datetime.utcfromtimestamp(tag_ref.lifetime_start_ts)),
                "comment": "",
                "command": "",
                "size": 0,
                "uploading": False,
                "sort_index": 0,
                "ancestors": "",
            }
            for image_id in legacy_image.full_image_id_chain
        ]
    }
def get(self, namespace, repository):
    """
    List the images for the specified repository.
    """
    repo_ref = registry_model.lookup_repository(namespace, repository)
    if repo_ref is None:
        raise NotFound()

    tags = registry_model.list_all_active_repository_tags(repo_ref)

    # Group the active tags by the legacy (V1) image ID backing each one.
    images_with_tags = defaultdict(list)
    for tag in tags:
        legacy_image_id = tag.manifest.legacy_image_root_id
        if legacy_image_id is not None:
            images_with_tags[legacy_image_id].append(tag)

    # NOTE: This is replicating our older response for this endpoint, but
    # returns empty for the metadata fields. This is to ensure back-compat
    # for callers still using the deprecated API, while not having to load
    # all the manifests from storage.
    # NOTE: inside the comprehension, `tags` is the comprehension variable
    # (this image's tag list), shadowing the outer `tags` list above.
    return {
        "images": [{
            "id": image_id,
            # Oldest start time among the tags pointing at this image.
            "created": format_date(
                datetime.utcfromtimestamp(
                    (min([tag.lifetime_start_ts for tag in tags])))),
            "comment": "",
            "command": "",
            "size": 0,
            "uploading": False,
            "sort_index": 0,
            "tags": [tag.name for tag in tags],
            "ancestors": "",
        } for image_id, tags in images_with_tags.items()]
    }
def image_dict(image):
    """Serialize a legacy image (with synthetic upload/sort fields) for the API."""
    # `command` is stored as a JSON string; surface a parse-error marker
    # instead of failing the whole serialization.
    command = None
    if image.command:
        try:
            command = json.loads(image.command)
        except (ValueError, TypeError):
            command = {"error": "Could not parse command"}

    data = {
        "id": image.docker_image_id,
        "created": format_date(image.created),
        "comment": image.comment,
        "command": command,
        "size": image.image_size,
        "uploading": False,
        "sort_index": 0,
    }
    # The ancestors string uses the docker image IDs.
    data["ancestors"] = "/{0}/".format("/".join(image.ancestor_ids))
    return data
def _layer_dict(manifest_layer, index):
    """Serialize a single manifest layer for the API response."""
    # The layer `command` is either a JSON string of an array (schema 1) or
    # a single string (schema 2); normalize both shapes to a list.
    command = None
    if manifest_layer.command:
        try:
            command = json.loads(manifest_layer.command)
        except (TypeError, ValueError):
            command = [manifest_layer.command]

    layer_view = {
        'index': index,
        'compressed_size': manifest_layer.compressed_size,
        'is_remote': manifest_layer.is_remote,
        'urls': manifest_layer.urls,
        'command': command,
        'comment': manifest_layer.comment,
        'author': manifest_layer.author,
        'blob_digest': str(manifest_layer.blob_digest),
        'created_datetime': format_date(manifest_layer.created_datetime),
    }
    return layer_view
def _layer_dict(manifest_layer, index):
    """Serialize a single manifest layer for the API response."""
    # The layer `command` is either a JSON string of an array (schema 1) or
    # a single string (schema 2); normalize both shapes to a list.
    command = None
    if manifest_layer.command:
        try:
            command = json.loads(manifest_layer.command)
        except (TypeError, ValueError):
            command = [manifest_layer.command]

    layer_view = {
        "index": index,
        "compressed_size": manifest_layer.compressed_size,
        "is_remote": manifest_layer.is_remote,
        "urls": manifest_layer.urls,
        "command": command,
        "comment": manifest_layer.comment,
        "author": manifest_layer.author,
        "blob_digest": str(manifest_layer.blob_digest),
        "created_datetime": format_date(manifest_layer.created_datetime),
    }
    return layer_view