def toggle_feed_enabled(feed, enabled):
    if not isinstance(enabled, bool):
        raise BadRequest(message="state must be a boolean", detail={"value": enabled})

    try:
        provider = get_vulnerabilities_provider()
        updated_feed = provider.update_feed_enabled_status(feed, enabled)

        if not updated_feed:
            raise ResourceNotFound(feed, detail={})

        return updated_feed.to_json(), 200
    except InvalidFeed:
        raise BadRequest(
            message="Feed not supported on configured vulnerability provider",
            detail={
                "feed": feed,
                "configured_provider": provider.get_config_name(),
            },
        )
    except AnchoreApiError:
        # Let API errors (e.g. the ResourceNotFound above) propagate with their own HTTP codes
        raise
    except Exception as e:
        log.error("Could not update feed enabled status")
        return jsonify(make_response_error(e, in_httpcode=500)), 500

def validate_digest_source(digest_source: dict, api_schema: dict) -> bool:
    schema = api_schema.get('definitions', {}).get(REGISTRY_DIGEST_SOURCE_SCHEMA_DEFINITION_NAME)

    try:
        jsonschema.validate(digest_source, schema)
    except jsonschema.ValidationError as e:
        raise BadRequest('Validation error', detail={'validation_error': '{}'.format(e)})

    # Semantic checks beyond the schema: the pullstring must be digest-based,
    # while the tag reference must be tag-based
    if not validate_pullstring_is_digest(digest_source['pullstring']):
        raise BadRequest('Must have digest-based pull string', detail={'invalid_value': digest_source['pullstring']})

    if not validate_pullstring_is_tag(digest_source['tag']):
        raise BadRequest('Must have tag-based pull string', detail={'invalid_value': digest_source['tag']})

    return True

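# Usage sketch (illustrative, not from the source): a digest source pairs a
# digest-based pullstring with a tag-based reference, per the checks above.
# 'api_schema' is assumed to be the parsed swagger.yaml definitions dict used by
# this module; the image name and digest are hypothetical placeholders.
example_digest_source = {
    'pullstring': 'docker.io/library/nginx@sha256:' + 'a' * 64,  # digest-based
    'tag': 'docker.io/library/nginx:latest',  # tag-based
}
validate_digest_source(example_digest_source, api_schema)  # True, or raises BadRequest
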
def toggle_group_enabled(feed, group, enabled):
    if not isinstance(enabled, bool):
        raise BadRequest(message="state must be a boolean", detail={"value": enabled})

    if feed == GRYPE_DB_FEED_NAME:
        raise HTTPNotImplementedError(
            message="Enabling and disabling groups for grypedb feed is not currently supported.",
            detail={},
        )

    session = db.get_session()
    try:
        g = db.set_feed_group_enabled(session, feed, group, enabled)
        if not g:
            raise ResourceNotFound(group, detail={})

        session.flush()
        grp = _marshall_group_response(g).to_json()
        session.commit()

        return jsonify(grp), 200
    except AnchoreApiError:
        session.rollback()
        raise
    except Exception:
        log.error("Could not update feed group enabled status")
        session.rollback()
        raise

def get(self):
    with db.session_scope() as session:
        image_report = db_catalog_image.get(self.image_digest, self.account_id, session=session)

    if not image_report:
        raise ResourceNotFound("Image not found", detail=self.get_error_detail())

    self.verify_analysis_status(image_report)

    image_content_data = self.get_image_content_data(self.image_digest)

    if self.content_type not in image_content_data:
        raise BadRequest(
            "image content of type (%s) was not an available type at analysis time for this image"
            % str(self.content_type),
            detail=self.get_error_detail(),
        )

    if self.__normalize_to_user_format_on_load__:
        image_content_data = helpers.make_image_content_response(
            self.content_type, image_content_data[self.content_type]
        )

    return self.hydrate_additional_data(image_content_data, image_report)

def validate_archive_source(archive_source: dict, api_schema: dict) -> bool:
    schema = api_schema.get('definitions', {}).get(REGISTRY_ARCHIVE_SOURCE_SCHEMA_DEFINITION_NAME)

    try:
        jsonschema.validate(archive_source, schema)
    except jsonschema.ValidationError as e:
        raise BadRequest('Validation error', detail={'validation_error': '{}'.format(e)})

    return True

def validate_tag_source(tag_source: dict, api_schema: dict) -> bool:
    """
    This exists since the regular API validation doesn't apply to the normalized output.

    :param tag_source: the 'tag' source dict to validate
    :param api_schema: the schema dict for the api to base validation on
    :return: True on success
    """
    schema = api_schema.get('definitions', {}).get(REGISTRY_TAG_SOURCE_SCHEMA_DEFINITION_NAME)

    try:
        jsonschema.validate(tag_source, schema)
    except jsonschema.ValidationError as e:
        raise BadRequest('Validation error', detail={'validation_error': '{}'.format(e)})

    if not validate_pullstring_is_tag(tag_source['pullstring']):
        raise BadRequest('Must have tag-based pull string', detail={'invalid_value': tag_source['pullstring']})

    return True

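# Usage sketch (illustrative): a tag source only needs a tag-based pullstring.
# 'api_schema' is again assumed to be the parsed swagger definitions dict; the
# image name is a hypothetical placeholder.
example_tag_source = {'pullstring': 'docker.io/library/nginx:latest'}
validate_tag_source(example_tag_source, api_schema)  # True, or raises BadRequest
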
def create_analysis_archive_rule(rule):
    """
    POST /archives/rules

    :return: the created rule as JSON and a 200 code on success
    """
    try:
        with session_scope() as session:
            # Validate that only one system_global rule has max_images_per_account set
            if (rule.get("system_global", False)
                    and rule.get("max_images_per_account", None) is not None):
                qry = session.query(ArchiveTransitionRule).filter(
                    ArchiveTransitionRule.account == ApiRequestContextProxy.namespace(),
                    ArchiveTransitionRule.system_global.is_(True),
                    ArchiveTransitionRule.max_images_per_account.isnot(None),
                )
                if qry.first() is not None:
                    raise BadRequest(
                        "A system_global Archive Transition Rule already exists with max_images_per_account set",
                        {"existingRule": repr(qry.first())},
                    )

            r = ArchiveTransitionRule()
            r.account = ApiRequestContextProxy.namespace()
            r.rule_id = uuid.uuid4().hex
            r.selector_registry = rule.get("selector", {}).get("registry", "*")
            r.selector_repository = rule.get("selector", {}).get("repository", "*")
            r.selector_tag = rule.get("selector", {}).get("tag", "*")
            r.analysis_age_days = int(rule.get("analysis_age_days", -1))
            r.tag_versions_newer = int(rule.get("tag_versions_newer", -1))
            r.transition = ArchiveTransitions(rule.get("transition"))
            r.system_global = rule.get("system_global", False)

            # Transition rule exclude information: defaults to excluding nothing, but
            # when set it supersedes the selector above
            exclude = rule.get("exclude", {})
            exclude_selector = exclude.get("selector", {})
            r.exclude_selector_registry = exclude_selector.get("registry", "")
            r.exclude_selector_repository = exclude_selector.get("repository", "")
            r.exclude_selector_tag = exclude_selector.get("tag", "")
            r.exclude_expiration_days = exclude.get("expiration_days", -1)

            r.max_images_per_account = rule.get("max_images_per_account", None)

            session.add(r)
            session.flush()

            return transition_rule_db_to_json(r), 200
    except AnchoreApiError:
        # Let the BadRequest above keep its 400 code instead of becoming a 500
        raise
    except Exception as ex:
        logger.exception("Exception in add")
        return (
            make_response_error("Error adding rule: {}".format(ex), in_httpcode=500),
            500,
        )

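# Illustrative request body for POST /archives/rules, assembled from the fields the
# handler reads above. All values are hypothetical; "transition" must name a valid
# ArchiveTransitions member (assumed here to include "archive").
example_rule = {
    "selector": {"registry": "docker.io", "repository": "library/*", "tag": "*"},
    "analysis_age_days": 90,  # eligible once the analysis is at least 90 days old
    "tag_versions_newer": 3,  # and at least 3 newer tag versions exist
    "transition": "archive",
    "system_global": False,
    "exclude": {
        "selector": {"registry": "docker.io", "repository": "library/keep-me", "tag": "*"},
        "expiration_days": 30,
    },
    "max_images_per_account": None,
}
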
def validate_image_add_source(analysis_request_dict, api_schema):
    """
    Validates the normalized ImageAnalysisRequest schema (swagger.yaml) with semantic checks.

    Raises exceptions on validation errors:

    BadRequest if the request has properties that don't make sense in combination or violate format checks

    ValueError if the input dict does not have the data expected to perform validation (e.g. needed to be normalized first)

    :param analysis_request_dict: the analysis request object
    :param api_schema: the schema dict for the api to base validation on
    :return: True on success
    """
    source = analysis_request_dict.get("source")
    top_tag = analysis_request_dict.get("tag")
    top_digest = analysis_request_dict.get("digest")
    top_dockerfile = analysis_request_dict.get("dockerfile")

    if source:
        # The deprecated top-level properties cannot be combined with a source
        if top_digest is not None or top_tag is not None or top_dockerfile is not None:
            raise BadRequest(
                "Cannot use both source property and tag, digest, or dockerfile property at top level",
                detail={},
            )

        digest_source = source.get("digest")
        tag_source = source.get("tag")
        archive_source = source.get("archive")
        import_source = source.get("import")

        if digest_source:
            return validate_digest_source(digest_source, api_schema)
        elif tag_source:
            return validate_tag_source(tag_source, api_schema)
        elif archive_source:
            return validate_archive_source(archive_source, api_schema)
        elif import_source:
            return True
        else:
            raise BadRequest("Must have one source property set", detail={})
    else:
        raise ValueError('Expected a "source" property in the input dict')

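# Minimal sketch of how the dispatcher above routes a normalized request: a request
# whose source carries a 'tag' entry is handed to validate_tag_source. The request
# body and 'api_schema' are hypothetical stand-ins.
normalized_request = {
    "source": {"tag": {"pullstring": "docker.io/library/alpine:3.14"}}
}
validate_image_add_source(normalized_request, api_schema)  # -> validate_tag_source(...)
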
def toggle_feed_enabled(feed, enabled):
    if not isinstance(enabled, bool):
        raise BadRequest(message='state must be a boolean', detail={'value': enabled})

    session = db.get_session()
    try:
        f = db.set_feed_enabled(session, feed, enabled)
        if not f:
            raise ResourceNotFound(feed, detail={})

        session.flush()
        updated = _marshall_feed_response(f)
        session.commit()

        return jsonify(updated), 200
    except AnchoreApiError:
        session.rollback()
        raise
    except Exception as e:
        log.error('Could not update feed enabled status')
        session.rollback()
        return jsonify(make_response_error(e, in_httpcode=500)), 500

def toggle_group_enabled(feed, group, enabled):
    if not isinstance(enabled, bool):
        raise BadRequest(message='state must be a boolean', detail={'value': enabled})

    session = db.get_session()
    try:
        g = db.set_feed_group_enabled(session, feed, group, enabled)
        if not g:
            raise ResourceNotFound(group, detail={})

        session.flush()
        grp = _marshall_group_response(g)
        session.commit()

        return jsonify(grp), 200
    except AnchoreApiError:
        session.rollback()
        raise
    except Exception:
        log.error('Could not update feed group enabled status')
        session.rollback()
        raise

def add_image(
    image_metadata=None,
    tag=None,
    digest=None,
    created_at=None,
    from_archive=False,
    allow_dockerfile_update=False,
):
    try:
        if image_metadata is None:
            image_metadata = {}

        request_inputs = anchore_engine.apis.do_request_prep(
            connexion.request,
            default_params={
                "tag": tag,
                "digest": digest,
                "created_at": created_at,
                "allow_dockerfile_update": allow_dockerfile_update,
            },
        )

        if image_metadata.get("import_operation_id") and from_archive:
            raise BadRequest(
                'Cannot specify both "from_archive=True" query parameter and include an import manifest in the payload',
                detail={},
            )

        if from_archive:
            # Restore an image from the analysis archive into the working set
            task = archiver.RestoreArchivedImageTask(
                account=ApiRequestContextProxy.namespace(), image_digest=digest
            )
            task.start()

            request_inputs["params"] = {}
            request_inputs["method"] = "GET"

            with db.session_scope() as session:
                (
                    return_object,
                    httpcode,
                ) = anchore_engine.services.catalog.catalog_impl.image_imageDigest(
                    session, request_inputs, digest
                )
        elif image_metadata.get("import_manifest"):
            # Import an image from the upload API
            try:
                import_manifest = ImportManifest.from_json(
                    image_metadata["import_manifest"]
                )
            except Exception as err:
                logger.debug_exception("Error unmarshalling manifest")
                # If we hit this, it means the swagger spec doesn't match the marshmallow schema
                raise BadRequest(
                    message="invalid import manifest", detail={"error": str(err)}
                )

            annotations = image_metadata.get("annotations", {})

            # Don't accept an in-line dockerfile
            if image_metadata.get("dockerfile"):
                raise BadRequest(
                    "Cannot provide dockerfile content directly in import payload. Use the import operation APIs to load the dockerfile before calling this endpoint",
                    detail={},
                )

            with db.session_scope() as session:
                # allow_dockerfile_update is a poor proxy for the 'force' option
                return_object = anchore_engine.services.catalog.importer.import_image(
                    session,
                    account=ApiRequestContextProxy.namespace(),
                    operation_id=import_manifest.operation_uuid,
                    import_manifest=import_manifest,
                    force=allow_dockerfile_update,
                    annotations=annotations,
                )

            httpcode = 200
        else:
            # Regular image-add case: analyze from a registry
            with db.session_scope() as session:
                (
                    return_object,
                    httpcode,
                ) = anchore_engine.services.catalog.catalog_impl.image(
                    session, request_inputs, bodycontent=image_metadata
                )
    except AnchoreApiError:
        raise
    except ImageConflict as img_err:
        httpcode = 409
        return_object = str(img_err)
    except Exception as err:
        logger.exception("Error processing image add")
        httpcode = 500
        return_object = str(err)

    return return_object, httpcode

def normalize_image_add_source(analysis_request_dict):
    """
    Normalizes the ImageAnalysisRequest-schema input request (validated already at API marshalling)
    into using the 'source' property instead of the deprecated 'tag', 'digest', and 'dockerfile'
    properties.

    Returns a new dict with the normalized request

    :param analysis_request_dict:
    :return: normalized request dict
    """
    if not analysis_request_dict:
        raise ValueError("Invalid request object, must be a valid json object")

    normalized = copy.deepcopy(analysis_request_dict)

    if normalized.get("source"):
        # Already has a source, that should be validated
        return normalized

    source = {}
    digest = tag = dockerfile = created_at = None

    if "digest" in normalized:
        digest = normalized.pop("digest")

    if "tag" in normalized:
        tag = normalized.pop("tag")

    if "dockerfile" in normalized:
        dockerfile = normalized.pop("dockerfile")

    if "created_at" in normalized:
        created_at = normalized.pop("created_at")

    # Use the legacy fields and normalize them to a source
    if digest:
        if DIGEST_REGEX.match(digest) is not None:
            # It's only a digest (e.g. sha256:abc), construct a pullstring
            if tag:
                parsed = parse_dockerimage_string(tag)
                digest_pullstring = (
                    parsed["registry"] + "/" + parsed["repo"] + "@" + digest
                )
            else:
                raise ValueError(
                    "For a digest-based analysis, the tag property must also be populated"
                )
        else:
            # Assume it is already a full pullstring, so no-op
            digest_pullstring = digest

        source["digest"] = {
            "pullstring": digest_pullstring,
            "tag": tag,
            "creation_timestamp_override": created_at,
        }
        if dockerfile:
            source["digest"]["dockerfile"] = dockerfile

        normalized["source"] = source
    elif tag:
        source["tag"] = {"pullstring": tag}
        if dockerfile:
            source["tag"]["dockerfile"] = dockerfile

        normalized["source"] = source
    else:
        raise BadRequest(
            'Must include either "tag", "tag" and "digest", or "source" property in body',
            detail={},
        )

    return normalized

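# Worked example (illustrative) of the normalization above: the deprecated top-level
# 'tag' and bare 'digest' fields are folded into a digest source with a constructed
# pullstring. Values are hypothetical placeholders.
legacy_request = {
    "tag": "docker.io/library/alpine:3.14",
    "digest": "sha256:" + "a" * 64,  # bare digest, so a pullstring is constructed
}
normalized = normalize_image_add_source(legacy_request)
# normalized["source"]["digest"] == {
#     "pullstring": "docker.io/library/alpine@" + "sha256:" + "a" * 64,
#     "tag": "docker.io/library/alpine:3.14",
#     "creation_timestamp_override": None,
# }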