def delete_analysis_archive_rule(ruleId):
    """
    DELETE /archives/rules/{ruleId}

    Deletes the caller's transition rule via the catalog service. If the
    catalog reports 404 and the caller is not the admin account, checks (as
    admin) whether the rule is a system-global rule so a 403 can be returned
    instead of the misleading 404.

    :param ruleId: id of the transition rule to delete
    :return: (response_body, http_status_code) tuple from the proxied call
    """
    client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
    try:
        resp1 = handle_proxy_response(
            client.delete_analysis_archive_rule(ruleId))
        if resp1[1] == 404 and ApiRequestContextProxy.namespace() != ADMIN_ACCOUNT_NAME:
            # Yes, this is a bit ugly
            # Get the rule, check if a global rule and adjust error code appropriately
            try:
                c2 = internal_client_for(CatalogClient, ADMIN_ACCOUNT_NAME)
                r2 = c2.get_analysis_archive_rule(ruleId)
                if r2 and r2.get('system_global', False):
                    return make_response_error(
                        'Non-admins cannot modify/delete system global rules',
                        in_httpcode=403), 403
            except Exception as ex:
                # Best-effort lookup only; on any failure fall through and
                # return the original 404 response
                pass
        return resp1
    except Exception as ex:
        return handle_proxy_response(ex)
def create_archive(bucket, archiveid, bodycontent):
    """
    Store a JSON document in the archive subsystem for the caller's account.

    :param bucket: archive bucket name
    :param archiveid: id/key of the document within the bucket
    :param bodycontent: json-serializable payload to store
    :return: (resource_url_or_error_object, http_status_code)
    """
    httpcode = 500
    try:
        accountName = ApiRequestContextProxy.namespace()
        archive_sys = archive.get_manager()

        jsonbytes = anchore_utils.ensure_bytes(json.dumps(bodycontent))

        # Build the canonical URL of the newly created resource, when the
        # service record is available
        my_svc = ApiRequestContextProxy.get_service()
        if my_svc is not None:
            resource_url = (my_svc.service_record["base_url"] + "/" +
                            my_svc.service_record["version"] + "/archive/" +
                            bucket + "/" + archiveid)
        else:
            resource_url = "N/A"

        # NOTE: removed a redundant inner try/except that set httpcode to its
        # existing value and immediately re-raised; also dropped the unused
        # `rc` local that captured put()'s return value
        archive_sys.put(accountName, bucket, archiveid, jsonbytes)
        return_object = resource_url
        httpcode = 200
    except Exception as err:
        return_object = anchore_engine.common.helpers.make_response_error(
            err, in_httpcode=httpcode)

    return return_object, httpcode
def inner_wrapper(*args, **kwargs):
    # Wrapper applied by the account-check decorator: authenticates the
    # caller, binds the identity into the request context, and invokes the
    # wrapped operation only if the account name/type checks pass.
    try:
        with Yosai.context(self._yosai):
            # Context Manager functions
            try:
                try:
                    identity = self.authenticate(request_proxy)
                    if not identity.username:
                        raise UnauthenticatedError('Authentication Required')
                except:
                    # Normalize any authn failure (including the raise above)
                    # into a single 401-producing error
                    raise UnauthenticatedError('Authentication Required')

                ApiRequestContextProxy.set_identity(identity)

                # NOTE(review): assumes _check_account raises on failure; if
                # it can return False this falls through and the handler
                # returns None — confirm against the decorator's contract
                if self._check_account(identity.user_account, identity.user_account_type, with_names, with_types):
                    return f(*args, **kwargs)
            finally:
                # Teardown the request context
                ApiRequestContextProxy.set_identity(None)
    except UnauthorizedAccountError as ex:
        return make_response_error(str(ex), in_httpcode=403), 403
    except UnauthenticatedError as ex:
        return Response(response='Unauthorized', status=401,
                        headers=[('WWW-Authenticate', 'basic realm="Authentication required"')])
    except Exception as ex:
        logger.exception('Unexpected exception: {}'.format(ex))
        return make_response_error('Internal error', in_httpcode=500), 500
def get_analysis_archive_rule(ruleId):
    """
    GET /archives/rules/{ruleId}

    Fetches the caller's rule via the catalog service. If the catalog reports
    404 and the caller is not the admin account, retries the lookup as admin
    and returns the rule when it is flagged system_global (global rules are
    readable by all accounts).

    :param ruleId: id of the transition rule to fetch
    :return: (response_body, http_status_code) tuple from the proxied call
    """
    client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
    try:
        resp1 = handle_proxy_response(client.get_analysis_archive_rule(ruleId))
        if resp1[1] == 404 and ApiRequestContextProxy.namespace() != ADMIN_ACCOUNT_NAME:
            # Yes, this is a bit ugly
            # Get the rule, check if a global rule
            try:
                c2 = internal_client_for(CatalogClient, ADMIN_ACCOUNT_NAME)
                r2 = handle_proxy_response(
                    c2.get_analysis_archive_rule(ruleId))
                if r2 and r2[1] == 200 and r2[0].get('system_global', False):
                    # Allow it
                    return handle_proxy_response(r2)
            except Exception as ex:
                # Best-effort only; fall through to the original 404
                pass
        return resp1
    except Exception as ex:
        return handle_proxy_response(ex)
def query_images_by_package_get(name=None, version=None, package_type=None):
    """
    Proxy the image-by-package query to the policy engine service.

    :param name: package name filter
    :param version: package version filter
    :param package_type: package type filter
    :return: (response_object, http_status_code)
    """
    try:
        request_inputs = anchore_engine.apis.do_request_prep(
            connexion.request,
            default_params={
                'name': name,
                'version': version,
                'package_type': package_type
            })

        account = ApiRequestContextProxy.namespace()
        client = internal_client_for(PolicyEngineClient, userId=account)

        logger.info('Params for image by_package: {}'.format(request_inputs))

        params = request_inputs.get('params', {})
        return_object = client.query_images_by_package(
            user_id=account,
            name=params.get('name'),
            version=params.get('version'),
            package_type=params.get('package_type'))
        httpcode = 200
    except Exception as err:
        logger.exception(
            'Error dispatching/receiving request from policy engine for image query by package'
        )
        httpcode = 500
        return_object = str(err)

    return (return_object, httpcode)
def query_images_by_vulnerability_get(vulnerability_id=None, severity=None, namespace=None, affected_package=None, vendor_only=True):
    """
    Proxy the image-by-vulnerability query to the policy engine service.

    :param vulnerability_id: vulnerability id filter
    :param severity: severity filter
    :param namespace: feed namespace filter
    :param affected_package: affected package name filter
    :param vendor_only: restrict matches to vendor-confirmed ones (default True)
    :return: (response_object, http_status_code)
    """
    try:
        prepped = anchore_engine.apis.do_request_prep(
            connexion.request,
            default_params={
                'vulnerability_id': vulnerability_id,
                'severity': severity,
                'namespace': namespace,
                'affected_package': affected_package,
                'vendor_only': vendor_only
            })
        params = prepped.get('params', {})

        account = ApiRequestContextProxy.namespace()
        client = internal_client_for(PolicyEngineClient, userId=account)

        return_object = client.query_images_by_vulnerability(
            user_id=account,
            vulnerability_id=params.get('vulnerability_id'),
            severity=params.get('severity'),
            namespace=params.get('namespace'),
            affected_package=params.get('affected_package'),
            vendor_only=params.get('vendor_only'))
        httpcode = 200
    except Exception as err:
        httpcode = 500
        return_object = str(err)

    return (return_object, httpcode)
def list_analysis_archive_rules(system_global=True):
    """
    GET /archives/rules

    :param system_global: when true, include system-global rules in addition
        to the account's own rules
    :return: (list_of_rule_json, 200) or error tuple
    """
    try:
        with session_scope() as session:
            account = ApiRequestContextProxy.namespace()
            qry = session.query(ArchiveTransitionRule)

            if system_global:
                # Account-owned rules plus any rule flagged system_global
                qry = qry.filter(
                    or_(
                        ArchiveTransitionRule.account == account,
                        ArchiveTransitionRule.system_global == True,
                    )
                )
            else:
                qry = qry.filter_by(account=account)

            return [transition_rule_db_to_json(rule) for rule in qry], 200
    except Exception as ex:
        return make_response_error(ex, in_httpcode=500), 500
def delete_archived_analysis(imageDigest, force=False): """ DELETE /archives/images/{digest} :param imageDigest: image digest to delete the archive for :return: """ # # Read the archive manifest, and delete or restore the artifacts try: if force: start_statuses = ['archiving', 'archived', 'deleting', 'deleted'] else: start_statuses = ['archived', 'deleting'] with session_scope() as session: resp = db_archived_images.update_image_status(session, ApiRequestContextProxy.namespace(), imageDigest, start_statuses, 'deleting') if resp is None: return make_response_error('Not found in archive', in_httpcode=404), 404 except Exception as ex: logger.exception("Error deleting archive for image {}/{}".format(ApiRequestContextProxy.namespace(), imageDigest)) return make_response_error('Invalid object state: {}'.format(ex), in_httpcode=400), 400 try: task = DeleteArchivedImageTask(account=ApiRequestContextProxy.namespace(), image_digest=imageDigest) task.run() resp = None return resp, 200 except Exception as ex: logger.exception('Failed deleting archived image') return make_response_error('Error deleting image archive: {}'.format(ex), in_httpcode=500), 500
def create_object(bucket, archiveid, bodycontent):
    """
    Store a JSON document in object storage for the caller's account.

    :param bucket: object-store bucket name
    :param archiveid: id/key of the document within the bucket
    :param bodycontent: json-serializable payload to store
    :return: (resource_url_or_error_object, http_status_code)
    """
    httpcode = 500
    try:
        account_name = ApiRequestContextProxy.namespace()
        obj_mgr = anchore_engine.subsys.object_store.manager.get_manager()

        payload = anchore_utils.ensure_bytes(json.dumps(bodycontent))
        obj_mgr.put(account_name, bucket, archiveid, payload)

        # Build the canonical URL for the stored document when possible
        svc = ApiRequestContextProxy.get_service()
        if svc is None:
            return_object = "N/A"
        else:
            record = svc.service_record
            return_object = (record['base_url'] + "/" + record['version'] +
                             "/archive/" + bucket + "/" + archiveid)
        httpcode = 200
    except Exception as err:
        return_object = anchore_engine.common.helpers.make_response_error(
            err, in_httpcode=httpcode)

    return return_object, httpcode
def delete_image(user_id, image_id):
    """
    DELETE the image and all resources for it. Returns 204 - No Content on success

    :param user_id: account/user id owning the image record
    :param image_id: id of the image record to delete
    :return: (None, 204) on success (idempotent), or an error tuple
    """
    db = get_session()
    try:
        log.info(
            "Deleting image {}/{} and all associated resources".format(
                user_id, image_id
            )
        )
        img = db.query(Image).get((image_id, user_id))
        if img:
            # Remove vulnerability records for the image before deleting it
            get_vulnerabilities_provider().delete_image_vulnerabilities(
                image=img, db_session=db
            )

            try:
                # Timeouts for the catalog-backed evaluation cache client
                conn_timeout = ApiRequestContextProxy.get_service().configuration.get(
                    "catalog_client_conn_timeout", DEFAULT_CACHE_CONN_TIMEOUT
                )
                read_timeout = ApiRequestContextProxy.get_service().configuration.get(
                    "catalog_client_read_timeout", DEFAULT_CACHE_READ_TIMEOUT
                )
                mgr = EvaluationCacheManager(
                    img, None, None, conn_timeout, read_timeout
                )
                mgr.flush()
            except Exception as ex:
                # Best-effort: a flush failure only leaves orphaned cache entries
                log.exception(
                    "Could not delete evaluations for image {}/{} in the cache. May be orphaned".format(
                        user_id, image_id
                    )
                )

            db.delete(img)
            db.commit()
        else:
            db.rollback()

        # Idempotently return 204. This isn't properly RESTY, but idempotency on delete makes clients much cleaner.
        return None, 204
    except HTTPException:
        # Framework-level HTTP errors propagate unchanged
        raise
    except Exception as e:
        log.exception(
            "Error processing DELETE request for image {}/{}".format(user_id, image_id)
        )
        db.rollback()
        return (
            make_response_error(
                "Error deleting image {}/{}: {}".format(user_id, image_id, e),
                in_httpcode=500,
            ),
            500,
        )
def inner_wrapper(*args, **kwargs):
    # Wrapper applied by the permission-requiring decorator: authenticates
    # the caller, resolves any late-bound permission parts against the
    # operation and its kwargs, authorizes, then invokes the handler.
    try:
        with Yosai.context(self._yosai):
            # Context Manager functions
            try:
                try:
                    identity = self.authenticate(request_proxy)
                    if not identity.username:
                        raise UnauthenticatedError('Authentication Required')
                except:
                    # Normalize any authn failure into a single
                    # 401-producing error
                    raise UnauthenticatedError('Authentication Required')

                ApiRequestContextProxy.set_identity(identity)
                permissions_final = []

                # Bind all the permissions as needed
                for perm in permission_s:
                    domain = perm.domain if perm.domain else '*'
                    action = perm.action if perm.action else '*'
                    target = perm.target if perm.target else '*'

                    # Late-bound parts expose bind(); resolve them against
                    # the operation/kwargs before constructing Permission
                    if hasattr(domain, 'bind'):
                        domain.bind(operation=f, kwargs=kwargs)
                        domain = domain.value
                    if hasattr(action, 'bind'):
                        action.bind(operation=f, kwargs=kwargs)
                        action = action.value
                    if hasattr(target, 'bind'):
                        target.bind(operation=f, kwargs=kwargs)
                        target = target.value

                    #permissions_final.append(':'.join([domain, action, target]))
                    permissions_final.append(Permission(domain, action, target))

                # Do the authz on the bound permissions
                try:
                    self.authorize(ApiRequestContextProxy().identity(), permissions_final)
                except UnauthorizedError as ex:
                    raise ex
                except Exception as e:
                    # Any unexpected authz failure is treated as a denial
                    logger.exception('Error doing authz: {}'.format(e))
                    raise UnauthorizedError(permissions_final)

                return f(*args, **kwargs)
            finally:
                # Teardown the request context
                ApiRequestContextProxy.set_identity(None)
    except UnauthorizedError as ex:
        return make_response_error(str(ex), in_httpcode=403), 403
    except UnauthenticatedError as ex:
        return Response(response='Unauthorized', status=401,
                        headers=[('WWW-Authenticate', 'basic realm="Authentication required"')])
    except AnchoreApiError:
        # API errors carry their own response semantics; let them propagate
        raise
    except Exception as ex:
        logger.exception('Unexpected exception: {}'.format(ex))
        return make_response_error('Internal error', in_httpcode=500), 500
def create_analysis_archive_rule(rule):
    """
    POST /archives/rules

    Creates a new archive transition rule for the caller's account.

    :param rule: dict with 'selector', 'transition', age/version thresholds,
        optional 'exclude' block, and optional 'system_global' /
        'max_images_per_account' fields
    :return: (rule_json, 200) or error tuple
    """
    try:
        with session_scope() as session:
            # Validate that only one system_global rule has max_images_per_account set
            if (rule.get("system_global", False)
                    and rule.get("max_images_per_account", None) is not None):

                qry = session.query(ArchiveTransitionRule).filter(
                    ArchiveTransitionRule.account == ApiRequestContextProxy.namespace(),
                    ArchiveTransitionRule.system_global.is_(True),
                    ArchiveTransitionRule.max_images_per_account.isnot(None),
                )

                if qry.first() is not None:
                    raise BadRequest(
                        "A system_global Archive Transition Rule already exists with max_images_per_account set",
                        {"existingRule": repr(qry.first())},
                    )

            r = ArchiveTransitionRule()
            r.account = ApiRequestContextProxy.namespace()
            r.rule_id = uuid.uuid4().hex
            # Selector defaults match everything ('*')
            r.selector_registry = rule.get("selector", {}).get("registry", "*")
            r.selector_repository = rule.get("selector", {}).get("repository", "*")
            r.selector_tag = rule.get("selector", {}).get("tag", "*")
            r.analysis_age_days = int(rule.get("analysis_age_days", -1))
            r.tag_versions_newer = int(rule.get("tag_versions_newer", -1))
            r.transition = ArchiveTransitions(rule.get("transition"))
            r.system_global = rule.get("system_global", False)

            # Transition Rule Exclude information (defaults to NOT exclude things), but will supercede the selector
            # above
            exclude = rule.get("exclude", {})
            exclude_selector = exclude.get("selector", {})
            r.exclude_selector_registry = exclude_selector.get("registry", "")
            r.exclude_selector_repository = exclude_selector.get(
                "repository", "")
            r.exclude_selector_tag = exclude_selector.get("tag", "")
            r.exclude_expiration_days = exclude.get("expiration_days", -1)

            r.max_images_per_account = rule.get("max_images_per_account", None)

            session.add(r)
            session.flush()

            return transition_rule_db_to_json(r), 200
    except Exception as ex:
        logger.exception("Exception in add")
        return (
            make_response_error("Error adding rule: {}".format(ex),
                                in_httpcode=500),
            500,
        )
def query_images_by_package(name=None, version=None, package_type=None, page=1, limit=None):
    """
    Paginated image-by-package query.

    Serves results from the pagination cache when available; otherwise
    queries the policy engine and caches the result set for later pages.

    :param name: package name filter
    :param version: package version filter
    :param package_type: package type filter
    :param page: page number of the paginated response
    :param limit: max results per page
    :return: (paginated_envelope_or_error, http_status_code)
    """
    request_inputs = anchore_engine.apis.do_request_prep(request,
                                                         default_params={
                                                             'name': name,
                                                             'version': version,
                                                             'package_type': package_type,
                                                             'page': page,
                                                             'limit': limit
                                                         })
    method = request_inputs['method']
    bodycontent = request_inputs['bodycontent']
    params = request_inputs.get('params', {})

    return_object = {}
    httpcode = 500

    try:
        policy_engine_call_time = 0.0
        try:
            # Cache hit path: a raised exception signals a cache miss
            result = anchore_engine.common.pagination.get_cached_pagination(
                query_digest=request_inputs['pagination_query_digest'])
        except Exception as err:
            # Cache miss: query the policy engine and time the call
            client = internal_client_for(PolicyEngineClient, ApiRequestContextProxy.namespace())
            timer = time.time()
            pe_result = client.query_images_by_package(
                user_id=ApiRequestContextProxy.namespace(),
                name=params.get('name'),
                version=params.get('version'),
                package_type=params.get('package_type'))
            policy_engine_call_time = time.time() - timer
            result = pe_result.get('matched_images', [])

        # Cache TTL scales with how expensive the upstream call was
        return_object = anchore_engine.common.pagination.make_response_paginated_envelope(
            result, envelope_key='images', page=page, limit=limit,
            dosort=True, sortfunc=lambda x: x['image']['imageDigest'],
            pagination_func=anchore_engine.common.pagination.do_cached_pagination,
            query_digest=request_inputs['pagination_query_digest'],
            ttl=max(30.0, policy_engine_call_time))
        httpcode = 200
    except Exception as err:
        return_object = anchore_engine.common.helpers.make_response_error(
            err, in_httpcode=httpcode)
        httpcode = return_object['httpcode']

    return return_object, httpcode
def inline_authz(self, permission_s: list):
    """
    Non-decorator impl of the @requires() decorator for isolated and inline
    invocation.

    :param permission_s: list of Permission objects
    :return: None on success; on failure returns an error response — a
        (body, http_status_code) tuple for 403/500 or a 401 Response object —
        rather than raising (the original docstring's "raises an exception"
        did not match the implementation)
    """
    try:
        with Yosai.context(self._yosai):
            # Context Manager functions
            try:
                try:
                    identity = self.authenticate(request_proxy)
                    if not identity.username:
                        raise UnauthenticatedError(
                            'Authentication Required')
                except:
                    # Normalize any authn failure into a 401-producing error
                    raise UnauthenticatedError('Authentication Required')

                ApiRequestContextProxy.set_identity(identity)
                permissions_final = []

                # Bind all the permissions as needed
                for perm in permission_s:
                    domain = perm.domain if perm.domain else '*'
                    action = perm.action if perm.action else '*'
                    target = perm.target if perm.target else '*'

                    permissions_final.append(
                        Permission(domain, action, target))

                # Do the authz on the bound permissions
                try:
                    self.authorize(ApiRequestContextProxy.identity(),
                                   permissions_final)
                except UnauthorizedError as ex:
                    raise ex
                except Exception as e:
                    # Any unexpected authz failure is treated as a denial
                    logger.exception('Error doing authz: {}'.format(e))
                    raise UnauthorizedError(permissions_final)

                return None
            finally:
                # Teardown the request context
                ApiRequestContextProxy.set_identity(None)
    except UnauthorizedError as ex:
        return make_response_error(str(ex), in_httpcode=403), 403
    except UnauthenticatedError as ex:
        return Response(response='Unauthorized', status=401,
                        headers=[('WWW-Authenticate', 'basic realm="Authentication required"')
                                 ])
    except Exception as ex:
        logger.exception('Unexpected exception: {}'.format(ex))
        return make_response_error('Internal error', in_httpcode=500), 500
def get_image_content(image_digest, content_type):
    """
    Fetch the requested content type for an image from the owning service.

    :param image_digest: digest identifying the image
    :param content_type: content category to retrieve
    :return: (content_or_error_object, http_status_code)
    """
    httpcode = 500
    try:
        svc = ApiRequestContextProxy.get_service()
        account = ApiRequestContextProxy.namespace()
        return_object = svc.get_image_content(account, content_type, image_digest)
        httpcode = 200
    except Exception as err:
        logger.exception("Failed to lookup image content")
        return_object = make_response_error(err, in_httpcode=httpcode)
        httpcode = return_object["httpcode"]

    return return_object, httpcode
def get_analysis_archive_rule(rule_id):
    """
    GET /archives/rules/{rule_id}

    :param rule_id: id of the transition rule to fetch
    :return: (rule_json, 200), (error, 404) when not found, or (error, 500)
    """
    try:
        with session_scope() as session:
            account = ApiRequestContextProxy.namespace()

            # Prefer a rule owned by the caller's own account
            rule = session.query(ArchiveTransitionRule).filter_by(
                account=account, rule_id=rule_id).one_or_none()

            if rule is None:
                # Allow users to get the system global rules
                rule = session.query(ArchiveTransitionRule).filter_by(
                    rule_id=rule_id, system_global=True).one_or_none()

            if rule is None:
                return make_response_error("Rule not found", in_httpcode=404), 404

            return transition_rule_db_to_json(rule), 200
    except Exception as ex:
        return make_response_error(ex, in_httpcode=500), 500
def invalidate_operation(operation_id):
    """
    DELETE /imports/images/{operation_id}

    Marks an import operation as invalidated unless it is already in a
    terminal or in-flight state (invalidated, complete, processing), in which
    case the record is returned unchanged.

    :param operation_id: uuid of the import operation
    :return: (operation_json, 200) or error tuple
    """
    try:
        with session_scope() as db_session:
            record = (db_session.query(ImageImportOperation).filter_by(
                account=ApiRequestContextProxy.namespace(),
                uuid=operation_id).one_or_none())
            if record:
                # Only transition states that are still mutable
                if record.status not in [
                        ImportState.invalidated,
                        ImportState.complete,
                        ImportState.processing,
                ]:
                    record.status = ImportState.invalidated
                    db_session.flush()
                resp = record.to_json()
            else:
                raise api_exceptions.ResourceNotFound(resource=operation_id, detail={})
            return resp, 200
    except Exception as ex:
        return make_response_error(ex, in_httpcode=500), 500
def query_vulnerabilities_get(id=None, affected_package=None, affected_package_version=None):
    """
    Proxy a vulnerability metadata query to the policy engine service.

    :param id: vulnerability id filter
    :param affected_package: affected package name filter
    :param affected_package_version: affected package version filter
    :return: (response_object, http_status_code)
    """
    try:
        prepped = anchore_engine.apis.do_request_prep(
            connexion.request,
            default_params={
                'id': id,
                'affected_package': affected_package,
                'affected_package_version': affected_package_version
            })
        params = prepped.get('params', {})

        client = internal_client_for(PolicyEngineClient,
                                     userId=ApiRequestContextProxy.namespace())
        resp = client.query_vulnerabilities(
            vuln_id=params.get('id'),
            affected_package=params.get('affected_package'),
            affected_package_version=params.get('affected_package_version'))
        code = 200
    except Exception as err:
        logger.exception(
            'Error dispatching/receiving request from policy engine for vulnerability query'
        )
        resp = str(err)
        code = 500

    return resp, code
def list_archives():
    """
    GET /archives

    Returns a summary of archived images plus the account's transition rules
    (count and most recent update time).

    :return: (summary_dict, 200) or error tuple
    """
    try:
        with session_scope() as session:
            imgs = db_archived_images.summarize(session) or []
            rules = (session.query(ArchiveTransitionRule).filter_by(
                account=ApiRequestContextProxy.namespace()).all() or [])
            rule_count = len(rules)

            newest = None
            if rule_count > 0:
                newest = epoch_to_rfc3339(
                    max(map(lambda x: x.last_updated, rules)))

            # Return an explicit 200 for consistency with the other handlers
            # in this module (previously relied on the framework's implicit
            # default status for a bare dict)
            return {
                "images": imgs,
                "rules": {
                    "count": rule_count,
                    "last_updated": newest
                }
            }, 200
    except Exception as ex:
        logger.exception("Failed to list archives")
        return make_response_error(ex, in_httpcode=500), 500
def validate_schema(notification):
    """
    Check if the notification conforms to the Schema outlined in the Swagger
    Spec. Also only do this for the types we know (policy_eval, vuln_update,
    tag_update, analysis_update)

    :param notification: notification object to deliver
    :return: True when the notification validates against the known schema,
        False otherwise (including unknown/missing notification types)
    """
    ret = False

    notification_type = notification.get("data", {}).get("notification_type", None)

    # BUG FIX: the missing/falsy check must come first. Previously it was an
    # unreachable elif after the membership test, because a None/empty type
    # is never in NOTIFICATION_MAPPING, so the warn log could never fire.
    if not notification_type:
        logger.warn("Notification Type not resolved: {}".format(notification))
        return ret
    elif notification_type not in NOTIFICATION_MAPPING.keys():
        logger.debug(
            "Not doing Schema validation for Notification Type: {}".format(
                notification_type))
        return ret

    notification_schema_definition = NOTIFICATION_MAPPING.get(
        notification_type, "NotificationBase")
    spec = ApiRequestContextProxy.get_service().api_spec
    schema = spec.get("definitions", {}).get(notification_schema_definition)

    try:
        jsonschema.validate(notification, schema)
        ret = True
    except jsonschema.ValidationError as e:
        # Validation failures are logged but the notification is still
        # delivered, for backwards compatibility
        logger.error(
            "Notification does not pass validation, still delivering for backwards compatibility: {}"
            .format(e))
        ret = False

    return ret
def get_services_by_name_and_host(servicename, hostid):
    """
    GET /system/services/<servicename>/<hostid>

    Looks up matching service records via the catalog and formats each one
    for the API response.

    :param servicename: name of the service to look up
    :param hostid: host id of the service instance
    :return: (list_of_service_records_or_error, http_status_code)
    """
    request_inputs = anchore_engine.apis.do_request_prep(request, default_params={})
    user_auth = request_inputs['auth']
    params = request_inputs['params']

    return_object = []
    httpcode = 500
    try:
        client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
        service_records = client.get_service(servicename=servicename, hostid=hostid)
        for service_record in service_records:
            return_object.append(
                make_response_service(user_auth, service_record, params))

        httpcode = 200
    except Exception as err:
        return_object = anchore_engine.common.helpers.make_response_error(
            err, in_httpcode=httpcode)
        httpcode = return_object['httpcode']

    return return_object, httpcode
def add_image(image_metadata=None, tag=None, digest=None, created_at=None, from_archive=False, allow_dockerfile_update=False):
    """
    Add an image to the catalog, either as a new add request or by restoring
    a previously archived analysis.

    :param image_metadata: optional dict of image metadata
    :param tag: tag reference for the image
    :param digest: image digest
    :param created_at: optional creation timestamp override
    :param from_archive: when true, restore the image analysis from the
        archive instead of performing a regular add
    :param allow_dockerfile_update: permit updating the dockerfile of an
        existing image record
    :return: (response_object, http_status_code)
    """
    try:
        if image_metadata is None:
            image_metadata = {}

        request_inputs = anchore_engine.apis.do_request_prep(connexion.request,
                                                             default_params={'tag': tag,
                                                                             'digest': digest,
                                                                             'created_at': created_at,
                                                                             'allow_dockerfile_update': allow_dockerfile_update})
        if from_archive:
            # Restore synchronously, then re-fetch the restored record via a
            # GET-shaped request against the catalog impl
            task = archiver.RestoreArchivedImageTask(account=ApiRequestContextProxy.namespace(), image_digest=digest)
            task.start()

            request_inputs['params'] = {}
            request_inputs['method'] = 'GET'

            with db.session_scope() as session:
                return_object, httpcode = anchore_engine.services.catalog.catalog_impl.image_imageDigest(session, request_inputs, digest)
        else:
            with db.session_scope() as session:
                return_object, httpcode = anchore_engine.services.catalog.catalog_impl.image(session, request_inputs, bodycontent=image_metadata)
    except Exception as err:
        logger.exception('Error processing image add')
        httpcode = 500
        return_object = str(err)

    return (return_object, httpcode)
def add_credential(credential):
    """
    POST /user/credentials

    The same logic as /users/{userId}/credentials, but gets the userId from
    the auth context rather than path. This is for use by regular (non-admin)
    users to update their own credentials.

    :param credential: credential dict with 'type' and 'value' keys
    :return: credential json object
    """
    try:
        cred_type_value = credential['type']
        if cred_type_value != UserAccessCredentialTypes.password.value:
            return make_response_error('Invalid credential type', in_httpcode=400), 400

        cred_type = UserAccessCredentialTypes(cred_type_value)
        username = ApiRequestContextProxy.identity().username

        with session_scope() as session:
            mgr = identities.manager_factory.for_session(session)
            result = mgr.add_user_credential(username=username,
                                             credential_type=cred_type,
                                             value=credential['value'])
            return credential_db_to_msg(result), 200
    except Exception as ex:
        logger.exception('API Error')
        return make_response_error(errmsg=str(ex), in_httpcode=500), 500
def ping():
    """
    GET /

    :return: 200 status with api version string
    """
    version_string = ApiRequestContextProxy.get_service().__service_api_version__
    return version_string, 200
def create_analysis_archive_rule(rule):
    """
    POST /archives/rules

    Creates a new archive transition rule for the caller's account.

    :param rule: dict with 'selector', 'transition', age/version thresholds,
        and optional 'system_global' flag
    :return: (rule_json, 200) or error tuple
    """
    try:
        with session_scope() as session:
            selector = rule.get('selector', {})

            new_rule = ArchiveTransitionRule()
            new_rule.account = ApiRequestContextProxy.namespace()
            new_rule.rule_id = uuid.uuid4().hex
            # Selector defaults match everything ('*')
            new_rule.selector_registry = selector.get('registry', '*')
            new_rule.selector_repository = selector.get('repository', '*')
            new_rule.selector_tag = selector.get('tag', '*')
            new_rule.analysis_age_days = int(rule.get('analysis_age_days', -1))
            new_rule.tag_versions_newer = int(rule.get('tag_versions_newer', -1))
            new_rule.transition = ArchiveTransitions(rule.get('transition'))
            new_rule.system_global = rule.get('system_global', False)

            session.add(new_rule)
            session.flush()

            return transition_rule_db_to_json(new_rule), 200
    except Exception as ex:
        logger.exception('Exception in add')
        return make_response_error('Error adding rule: {}'.format(ex), in_httpcode=500), 500
def content_upload(operation_id, content_type, request):
    """
    Generic handler for multiple types of content uploads. Still operates at the API layer

    :param operation_id: id of the import operation this upload belongs to
    :param content_type: type label for the uploaded content
    :param request: request object whose data payload is forwarded
    :return: (response_object, http_status_code)
    """
    try:
        account = ApiRequestContextProxy.namespace()
        client = internal_client_for(CatalogClient, userId=account)

        result = client.upload_image_import_content(
            operation_id, content_type, request.data
        )
        return result, 200
    except api_exceptions.AnchoreApiError as ex:
        # Preserve the error's own HTTP status code
        return (
            make_response_error(ex, in_httpcode=ex.__response_code__),
            ex.__response_code__,
        )
    except Exception as ex:
        logger.exception("Unexpected error in api processing")
        return make_response_error(ex, in_httpcode=500), 500
def archive_image_analysis(imageReferences):
    """
    POST /archives/images

    body = [digest1, digest2, ... ]

    Archives each referenced image analysis, collecting a per-digest status
    so one failure does not abort the batch.

    :param imageReferences: list of 1-100 image digests to archive
    :return: (list_of_per_digest_results, 200) or error tuple
    """
    try:
        if not imageReferences or len(imageReferences) > 100:
            return make_response_error('Bad Request. Must include a list of digests between 1 and 100 entries long', in_httpcode=400), 400

        results = []
        for digest in imageReferences:
            try:
                # Do synchronous part to start the state transition
                task = ArchiveImageTask(account=ApiRequestContextProxy.namespace(), image_digest=digest)
                result_status, result_detail = task.run()
                results.append({'digest': task.image_digest, 'status': result_status, 'detail': result_detail})
            except Exception as ex:
                # Fixed garbled log message ("Unexpected an uncaught exception ...")
                logger.exception('Unexpected and uncaught exception from the archive task execution')
                results.append({'digest': digest, 'status': 'error', 'detail': str(ex)})

        return results, 200
    except Exception as err:
        # Fixed copy-pasted message ("Error processing image add") inherited
        # from the image-add handler; this is the archive endpoint
        logger.exception('Error processing image archive request')
        return make_response_error(err, in_httpcode=500), 500
def get_object_from_storage(bucket: str, archiveid: str, is_json: bool = False):
    """
    Gets an object from object storage.

    Has condition to return it as json or not. This is used by two endpoints
    so that a single endpoint does not return multiple different content
    types.

    :param bucket: object-store bucket name
    :param archiveid: id/key of the document within the bucket
    :param is_json: when true, parse the stored bytes as JSON before returning
    :return: (document_or_error_object, http_status_code)
    """
    http_code = 200
    try:
        obj_mgr = anchore_engine.subsys.object_store.manager.get_manager()
        account_name = ApiRequestContextProxy.namespace()

        return_object = obj_mgr.get(account_name, bucket, archiveid)
        if return_object:
            if is_json:
                return_object = json.loads(return_object)
        else:
            http_code = 404
            return_object = anchore_engine.common.helpers.make_response_error(
                "No document found at given path", in_httpcode=http_code)
    except Exception as err:
        http_code = 500
        return_object = anchore_engine.common.helpers.make_response_error(
            err, in_httpcode=http_code)

    return return_object, http_code
def action_provider(op_id):
    """
    Lazy lookup of the action associated with the operation id via the
    request context reference to the parent service, which provides the map
    of ops->actions (via the swagger doc)

    :param op_id: operation id from the api spec
    :return: the action mapped to that operation id
    """
    service = ApiRequestContextProxy.get_service()
    return service.action_for_operation(op_id)
def save_import_content(db_session, operation_id: str, content: bytes, content_type: str) -> tuple:
    """
    Generic handler for content type saving that does not do any validation.

    Deduplicates by content digest: if an identical payload was already
    recorded for this operation and content type, the existing record is
    returned without re-storing.

    :param db_session: active db session
    :param operation_id: id of the import operation this content belongs to
    :param content: raw content bytes to store (fixes previous docstring
        which documented a nonexistent ``sbom`` parameter)
    :param content_type: type label for the content
    :return: (digest_hex, created_at) for the stored content record
    """
    hasher = sha256(content)  # Direct bytes hash
    digest = hasher.digest().hex()

    # Look for an existing identical payload for this operation/type
    found_content = (db_session.query(ImageImportContent).filter(
        ImageImportContent.operation_id == operation_id,
        ImageImportContent.content_type == content_type,
        ImageImportContent.digest == digest,
    ).one_or_none())

    if found_content:
        logger.info("Found existing record {}".format(found_content.digest))
        # Short circuit since already present
        return found_content.digest, found_content.created_at

    import_bucket = generate_import_bucket()
    key = generate_key(ApiRequestContextProxy.namespace(), operation_id, content_type, digest)

    content_record = ImageImportContent()
    content_record.account = ApiRequestContextProxy.namespace()
    content_record.digest = digest
    content_record.content_type = content_type
    content_record.operation_id = operation_id
    content_record.content_storage_bucket = import_bucket
    content_record.content_storage_key = key

    db_session.add(content_record)
    db_session.flush()

    mgr = manager.object_store.get_manager()
    resp = mgr.put_document(ApiRequestContextProxy.namespace(), import_bucket, key, ensure_str(content))
    if not resp:
        # Abort the transaction
        raise Exception("Could not save into object store")

    return digest, content_record.created_at