async def test_get_logout_response(user):
    """Ensure get_logout_response expires the auth cookie without returning a response."""
    response = Response()
    result = await cookie_authentication.get_logout_response(user, response)
    # Nothing must be returned directly so that FastAPI can terminate
    # the response properly itself.
    assert result is None
    set_cookie_headers = [
        header for header in response.raw_headers if header[0] == b"set-cookie"
    ]
    assert len(set_cookie_headers) == 1
    cookie_value = set_cookie_headers[0][1].decode("latin-1")
    # An expired cookie carries Max-Age=0.
    assert "Max-Age=0" in cookie_value
def get_flows(ip_address: str, timeframe: int):
    """Get recent flows for an IP address from Stealthwatch.

    :param ip_address: host whose flows to fetch
    :param timeframe: look-back window in hours
    :raises HTTPException: 501 when the module is not configured,
        502 when Stealthwatch returns a non-200 response
    """
    # Return HTTP 501 if the Stealthwatch credentials are not configured
    # (the old comment claimed 204, but a 501 is what is raised).
    if (not os.getenv("STEALTHWATCH_API_ADDRESS")
            or not os.getenv("STEALTHWATCH_API_USERNAME")
            or not os.getenv("STEALTHWATCH_API_PASSWORD")):
        raise HTTPException(status_code=501, detail="Module not configured")

    # Build the API URL
    api_url = "https://{}/smc/swsService/flows".format(
        os.getenv("STEALTHWATCH_API_ADDRESS"))

    # Change the number of hours to milliseconds for Stealthwatch
    duration = timeframe * 60 * 60 * 1000

    # Get the XML that we'll send to Stealthwatch
    xml = _get_stealthwatch_flows_xml(duration, ip_address)

    # Send the request to Stealthwatch.
    # NOTE(review): verify=False disables TLS certificate validation;
    # confirm this is intentional for the target deployment.
    http_request = requests.post(api_url,
                                 auth=HTTPBasicAuth(
                                     os.getenv("STEALTHWATCH_API_USERNAME"),
                                     os.getenv("STEALTHWATCH_API_PASSWORD")),
                                 data=xml,
                                 verify=False)

    # Check to make sure the POST was successful
    if http_request.status_code == 200:
        response = xmltodict.parse(
            http_request.text)['soapenc:Envelope']['soapenc:Body']
        if response['getFlowsResponse']['flow-list']:
            # Return JSON formatted flows
            return response
        # If Stealthwatch returns nothing, return a 204
        return Response(status_code=204)

    # BUG FIX: the original called exit() here, which raised SystemExit and
    # killed the whole worker process. Surface the failure as a 502 instead.
    print(
        'Stealthwatch Connection Failure - HTTP Return Code: {}\nResponse: {}'
        .format(http_request.status_code, http_request.text))
    raise HTTPException(status_code=502,
                        detail="Stealthwatch connection failure")
async def delete_bucket(
    bucket: str,
    authorization: Optional[str] = Header(None),
    x_amz_expected_bucket_owner: Optional[str] = None,
):
    """Delete a bucket owned by the caller, mirroring the S3 DeleteBucket API."""
    account_id = simple_aws_account_id(authorization)

    # Unknown bucket: S3-style NoSuchBucket error.
    if not model.is_existing_bucket(bucket, account_id):
        raise S3ApiException(
            dict2xml({
                "Error": {
                    "Code": "NoSuchBucket",
                    "Message": f"Bucket {bucket} does not exist",
                }
            }),
            404,
        )

    # The caller must own the bucket and it must be empty before deletion.
    if not (model.is_bucket_owner(bucket, account_id)
            and model.is_bucket_empty(bucket, account_id)):
        raise S3ApiException(
            dict2xml({
                "Error": {
                    "Code": "AccessDenied",
                    "Message": f"Access denied",
                }
            }),
            403,
        )

    print(f"Deleting bucket {bucket}")
    if model.delete_bucket(bucket, account_id):
        return Response(
            headers={
                "x-amz-id-2": "OpaqueString",
                "x-amz-request-id": "OpaqueRequestId",
            })

    raise S3ApiException(
        dict2xml({
            "Error": {
                "Code": "InternalError",
                "Message": f"Failed to delete bucket {bucket}",
            }
        }),
        503,
    )
async def add(
    table: str = Body(..., embed=True, title="Name of the table"),
    item: Union[dict, List[dict]] = Body(..., embed=True, title="List of items to add"),
    _: UserModel = Depends(authentication.verify()),
):
    """
    Returns the results of the query
    """
    try:
        # A single dict goes through add(); a list uses the batch path.
        if isinstance(item, dict):
            writer = rds_handler.add
        else:
            writer = rds_handler.add_batch
        writer(table, item)
        return Response(status_code=200)
    except Exception as e:
        raise HTTPException(status_code=400, detail=str(e))
def delete_schedules(
    project: str,
    auth_verifier: deps.AuthVerifierDep = Depends(deps.AuthVerifierDep),
    db_session: Session = Depends(deps.get_db_session),
):
    """Delete every schedule in *project* after verifying delete permissions."""
    scheduler = get_scheduler()
    existing_schedules = scheduler.list_schedules(db_session, project)
    # Verify the caller may delete each schedule before removing any of them.
    mlrun.api.utils.clients.opa.Client().query_project_resources_permissions(
        mlrun.api.schemas.AuthorizationResourceTypes.schedule,
        existing_schedules.schedules,
        lambda schedule: (schedule.project, schedule.name),
        mlrun.api.schemas.AuthorizationAction.delete,
        auth_verifier.auth_info,
    )
    scheduler.delete_schedules(db_session, project)
    return Response(status_code=HTTPStatus.NO_CONTENT.value)
async def multipart_put(
    env: str, key: str, uploadId: str, partNumber: int, request: Request
):
    """Upload one part of a multipart S3 upload, streaming the request body."""
    body_reader = RequestReader.get_reader(request)
    environment = get_environment(env)
    # Relay the client-supplied integrity/length headers to S3 verbatim.
    content_md5 = request.headers["Content-MD5"]
    content_length = int(request.headers["Content-Length"])
    async with s3_client(profile=environment.aws_profile) as s3:
        result = await s3.upload_part(
            Body=body_reader,
            Bucket=environment.bucket,
            Key=key,
            PartNumber=partNumber,
            UploadId=uploadId,
            ContentMD5=content_md5,
            ContentLength=content_length,
        )
        return Response(headers={"ETag": result["ETag"]})
async def add_datasource(request: Request, call_next):
    """
    Attach the data source to the request.state.
    """
    # Resolve the data source named in the query string (default: "jhu").
    source = data_source(request.query_params.get("source", default="jhu"))

    if not source:
        # Unknown source: short-circuit with a 404 before hitting the handler.
        return Response("The provided data-source was not found.", status_code=404)

    # Make the source available to downstream handlers, then continue.
    request.state.source = source
    LOGGER.debug(f"source provided: {source.__class__.__name__}")
    return await call_next(request)
async def twilio_write(
    request: Request,
    From: str = Form(...),
    Body: str = Form(...),
    db: Session = Depends(get_db),
):
    """Persist an inbound Twilio SMS and reply with a TwiML confirmation."""
    store_request(db=db, req=request, new_message=Body, phone=From)

    # Twilio integration. Respond to messages.
    # Security could be added here by checking the headers for a Twilio
    # signature against the origin URL for a given account; more info on
    # https://www.twilio.com/blog/build-secure-twilio-webhook-python-fastapi
    from twilio.twiml.messaging_response import MessagingResponse

    twiml = MessagingResponse()
    twiml.message(
        f"Welcome to Pablo's Avocados - We have confirmed your appointment. We will contact you at {From}. The words you used, namely '{Body}', don't have any creativity or rythm whatsoever but have a good day nonetheless"
    )
    return Response(content=str(twiml), media_type="application/xml")
def terra_trait_daily(
    selectedDay: int = 12,
    selectedTrait: str = 'canopy_height',
):
    """Return trait values across the field for a given day as CSV."""
    # Mini version of the data file that is quick to read and write here.
    traits_df = pd.read_csv('trait_explorer/s4_height_and_models.csv')

    # Find the field boundaries of the data dynamically
    # (hardcoding them would be faster).
    stats = traits_df.describe()
    max_column = stats.loc['max', 'column']
    max_range = stats.loc['max', 'range']

    # Extract the trait values at (or as soon before) the requested day.
    plot_df = renderCanopyHeightOnDay(
        traits_df, max_range, max_column, selectedDay, selectedTrait)
    return Response(content=plot_df.to_csv(index=False), media_type='text/csv')
async def create_or_patch(request: Request,
                          project: str,
                          endpoint_id: str,
                          model_endpoint: ModelEndpoint) -> Response:
    """
    Either create or updates the kv record of a given ModelEndpoint object
    """
    access_key = get_access_key(request.headers)

    # The path parameters must agree with the payload's metadata.
    if project != model_endpoint.metadata.project:
        raise MLRunConflictError(
            f"Can't store endpoint of project {model_endpoint.metadata.project} into project {project}"
        )
    if endpoint_id != model_endpoint.metadata.uid:
        raise MLRunConflictError(
            f"Mismatch between endpoint_id {endpoint_id} and ModelEndpoint.metadata.uid {model_endpoint.metadata.uid}."
            f"\nMake sure the supplied function_uri, and model are configured as intended"
        )

    await ModelEndpoints.create_or_patch(
        access_key=access_key, model_endpoint=model_endpoint)
    return Response(status_code=HTTPStatus.NO_CONTENT.value)
def json_or_msgpack(request, content, expires=None, headers=None):
    """Serialize *content* as msgpack or JSON based on content negotiation.

    Adds an ETag (md5 of the stringified content) and honors
    If-None-Match with a 304 short-circuit.

    :param expires: optional datetime written as an Expires header
    :param headers: optional extra headers; never mutated
    """
    media_type = resolve_media_type(request)
    with record_timing(request.state.metrics, "tok"):
        etag = md5(str(content).encode()).hexdigest()
    # BUG FIX: copy instead of aliasing — the original wrote the ETag and
    # Expires keys straight into the caller-supplied dict.
    headers = dict(headers) if headers else {}
    headers["ETag"] = etag
    if expires is not None:
        headers["Expires"] = expires.strftime(HTTP_EXPIRES_HEADER_FORMAT)
    if request.headers.get("If-None-Match", "") == etag:
        # If the client already has this content, confirm that.
        return Response(status_code=304, headers=headers)
    if media_type == "application/x-msgpack":
        return MsgpackResponse(content, headers=headers, metrics=request.state.metrics)
    return NumpySafeJSONResponse(content, headers=headers, metrics=request.state.metrics)
def get_result_set(
    *,
    db_session: Session = Depends(deps.db_session),
    result_set_crud: CRUDResultSet = Depends(deps.result_set_crud),
    result_set_id: str,
    result_format: schemas.ResultSetFormat = schemas.ResultSetFormat.json,
) -> Any:
    """Get a ResultSet by id"""
    try:
        result_set = result_set_crud.get(db_session, result_set_id=result_set_id)
    except ResultSetNotFound as ex:
        raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=str(ex)) from ex

    if result_format != schemas.ResultSetFormat.csv:
        return result_set
    # CSV was requested: render the API schema as text/csv instead of JSON.
    return Response(content=result_set.to_api_schema().to_csv(),
                    media_type="text/csv")
def create_schedule(
    project: str,
    schedule: schemas.ScheduleInput,
    auth_verifier: deps.AuthVerifier = Depends(deps.AuthVerifier),
    db_session: Session = Depends(deps.get_db_session),
):
    """Create a schedule in *project* and reply with 201 Created."""
    scheduler = get_scheduler()
    scheduler.create_schedule(
        db_session,
        auth_verifier.auth_info,
        project,
        schedule.name,
        schedule.kind,
        schedule.scheduled_object,
        schedule.cron_trigger,
        labels=schedule.labels,
        concurrency_limit=schedule.concurrency_limit,
    )
    return Response(status_code=HTTPStatus.CREATED.value)
async def get_archive_digest(genome: str = g, asset: str = a, tag: Optional[str] = tq):
    """
    Returns the archive digest. Requires genome name asset name and tag name
    as an input.
    """
    tag = tag or DEFAULT_TAG
    try:
        # Walk the registry config down to the archive checksum for this tag.
        tag_cfg = rgc[CFG_GENOMES_KEY][genome][CFG_ASSETS_KEY][asset][CFG_ASSET_TAGS_KEY][tag]
        digest = tag_cfg[CFG_ARCHIVE_CHECKSUM_KEY]
    except KeyError:
        msg = MSG_404.format(
            f"genome/asset:tag combination ({genome}/{asset}:{tag})")
        _LOGGER.warning(msg)
        raise HTTPException(status_code=404, detail=msg)
    return Response(content=digest, media_type="text/plain")
async def unlike_post(
        post_id: str,
        current_user: UserBase = Depends(get_current_active_user)):
    """Remove the current user's like from a post.

    :raises HTTPException: 404 if the post doesn't exist,
        422 if the update could not be applied
    """
    post = db.posts.find_one({'_id': ObjectId(post_id)})
    if post is None:
        # BUG FIX: the original *returned* the HTTPException objects, which
        # made FastAPI serialize them into a 200 body instead of producing
        # the intended error status. They must be raised.
        raise HTTPException(404, 'Post is not exist')
    result = db.posts.update_one(
        {'_id': ObjectId(post_id)},
        {"$pull": {
            'likes': {
                "username": current_user.username
            }
        }}, False, True)
    if not result:
        raise HTTPException(422, 'can\'t insert')
    return Response('', 204)
def get_default_token_image(
        token_type: int,
        color: Color,
        magic_color_svg: MagicColorSvg = Depends(get_magic_color_svg)
) -> Response:
    """Serve a default token SVG recolored to *color*, with private caching."""
    if not 0 <= token_type < 1000:
        raise HTTPException(
            400, 'The token_type must be between 0 and 999 for default tokens.')
    try:
        content = magic_color_svg.get_colored_svg(f"default/token_{token_type}", color)
    except NoSuchIMage:
        raise HTTPException(404, 'No such token_type.')
    response = Response(content=content, media_type='image/svg+xml')
    # Let the browser cache the rendered SVG privately for 16 hours.
    response.headers['Cache-Control'] = 'private,max-age=57600'
    return response
def setup_otp(db: Session = Depends(get_db)):
    """
    Test route for displaying QR code for Google Authenticator
    """
    # Assume this user exists, it's just a test anyway
    user = db.query(User).filter_by(username="******").first()
    img = qrcode.make(TOTPManager(user).provision(), image_factory=SvgImage)
    rendered_svg = etree.tostring(img.get_image()).decode()
    html = f"""
    <h1>Here is your QR code for GA</h1>
    <p>
    {rendered_svg}
    </p>
    """
    # BUG FIX: declare the content type so browsers render the markup
    # instead of relying on content sniffing — the original Response set
    # no media type at all.
    return Response(content=html, media_type="text/html")
def get_map(lat: float, long: float):
    """Look up the geostore row whose polygon contains the given WGS84 point."""
    # NOTE(review): the query is built with str.format; lat/long are
    # validated as floats by the framework so injection is not possible
    # here, but a parameterized query would be more robust.
    query = "SELECT id, location_name, location_type, location_parent FROM geostore WHERE ST_Within(ST_GeometryFromText('SRID=4326;POINT({0} {1})'), polygon) LIMIT 1".format(long, lat)
    print(query)
    result = connection.execute(query)

    if result.rowcount <= 0:
        not_found_resp = {"status": "not found"}
        return Response(content=json.dumps(not_found_resp), status_code=404)

    # Collect every remaining row (replaces the sentinel-driven while loop).
    matches = []
    for row in iter(result.fetchone, None):
        matches.append(row)
    return matches
def create_product(product_in: ProductIn,
                   request: Request,
                   settings: config.Settings = Depends(config.get_settings)):
    """Create a product via the products RPC service; 201 with Location on success."""
    with ClusterRpcProxy(settings.cluster_rpc_proxy_config) as rpc:
        service_response = rpc.products.create(product_in.dict())

        error = service_response.get('error', None)
        if error:
            # Map service-level validation failures to 422, everything else to 400.
            if error.get('code', '') == 'VALIDATION_ERROR':
                status_code = status.HTTP_422_UNPROCESSABLE_ENTITY
            else:
                status_code = status.HTTP_400_BAD_REQUEST
            response_data = {'status_code': status_code, 'error': error}
            return JSONResponse(content=response_data, status_code=status_code)

        product_id = service_response.get('id')
        location = f'{request.url}{product_id}'
        headers = {'Location': location, 'Entity': product_id}
        return Response(status_code=status.HTTP_201_CREATED, headers=headers)
async def update_company(id: str, data: UpdateCompanySchema, auth: IsAdmin = Depends()):
    """Update a company document with the supplied fields; 204 on success."""
    try:
        # Apply only the fields the client actually sent.
        result = db.companies.update_one(
            {'_id': ObjectId(id)},
            {'$set': data.dict(exclude_unset=True)}
        )
    except errors.InvalidId:
        raise HTTPException(status_code=400, detail='invalid id')
    except errors.DuplicateKeyError:
        raise HTTPException(status_code=400, detail='name already registered')
    except errors.WriteError:
        raise HTTPException(status_code=400, detail='nothing to update')

    # No document matched the id: nothing was updated.
    if not result.matched_count:
        raise HTTPException(status_code=404, detail='company not found')
    return Response(status_code=204)
async def decorator(*args, **kwargs) -> Optional[Any]:
    """Run the wrapped handler, cancelling it if the client disconnects.

    Returns the handler's result, or a 499 response when the handler task
    was cancelled mid-flight.
    """
    # NOTE(review): the enclosing decorator presumably injects the Request
    # under this key — confirm against the wrapper that builds kwargs.
    request = kwargs["_request"]
    handler_task = asyncio.create_task(
        handler(*args, **kwargs), name="cancellable_request/handler"
    )
    # Watchdog task that cancels handler_task when the client goes away.
    auto_cancel_task = asyncio.create_task(
        _cancel_task_if_client_disconnected(request, handler_task),
        name="cancellable_request/auto_cancel",
    )
    try:
        return await handler_task
    except CancelledError:
        logger.warning(
            "request %s was cancelled by client %s!", request.url, request.client
        )
        # 499 is nginx's non-standard "client closed request" status.
        return Response("Oh No!", status_code=499)
    finally:
        # Always tear down the watchdog, on success, error, or cancellation.
        auto_cancel_task.cancel()
async def disable_deletion_protection(
    request: Request,
    user: User = Depends(get_current_active_user),
    code: str = Form(...),
):
    """Disable the user's deletion protection and notify the htmx frontend."""
    vm = VMBase(request)
    await vm.check_for_user()
    vm.app = await user_crud.disable_deletion_protection(user, code)
    res = Response("", status_code=200)
    # NOTE(review): "HX-Trigger" is assigned twice below. This only works if
    # the htmx helpers merge the existing header value (both are passed
    # res.headers); otherwise the second assignment overwrites the
    # notification trigger — confirm against the htmx helper implementation.
    res.headers["HX-Trigger"] = htmx.make_show_notification_header(
        res.headers,
        "Deletion Protection Disabled",
        f"You can now delete Your account.",
        "info",
    )
    res.headers["HX-Trigger"] = htmx.make_event_header(res.headers, {"closeModal": {}})
    return res
async def proxy_frontegg_requests(
    application_path: str,
    request: Request,
) -> Response:
    """Forward an incoming request to Frontegg and relay the reply verbatim."""
    body = await request.body()
    # Prefer the Host header, fall back to the client address, and strip
    # any ":port" suffix either way.
    host = request.headers.get('host') or request.client.host
    host = host.split(':')[0]
    # proxy_request is run off the event loop thread, so it is presumably
    # synchronous. NOTE(review): `self` must come from an enclosing scope —
    # this function appears to be defined inside a method/closure; confirm.
    response = await run_in_threadpool(self.proxy_request,
                                       request=request,
                                       method=request.method,
                                       path=application_path,
                                       host=host,
                                       body=body,
                                       headers=request.headers,
                                       params=request.query_params)
    # NOTE(review): upstream headers are relayed wholesale; hop-by-hop
    # headers (Content-Length/Transfer-Encoding/Connection) could conflict
    # with the relayed body — verify against the proxy_request implementation.
    return Response(content=response.content,
                    status_code=response.status_code,
                    headers=response.headers)
async def list_tags(name, request: Request):
    """List the tags available for image *name*, docker-registry style."""
    logger.debug(request.headers)
    registry = conf.get_registry_dir()
    manifest_dir = os.path.join(registry, "manifests", name)
    # Every non-empty directory entry under manifests/<name> is a tag.
    tags = [entry.strip() for entry in os.listdir(manifest_dir) if entry.strip()]
    resp = {"name": name, "tags": tags}
    logger.debug(resp)
    response = Response(content=json.dumps(resp))
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    response.status_code = status.HTTP_200_OK
    logger.debug(response.headers)
    return response
async def webhook(data: WebhookRequestData):
    """
    Messages handler.
    """
    if data.object == "page":
        for entry in data.entry:
            for event in entry.get("messaging", []):
                message = event.get("message")
                # Skip events that carry no message payload.
                if not message:
                    continue
                sender_id = event["sender"]["id"]
                # Echo the text back to the sender.
                await send_message(page_access_token=os.environ["PAGE_ACCESS_TOKEN"],
                                   recipient_id=sender_id,
                                   message_text=f"echo: {message['text']}")
    return Response(content="ok")
def _wrap_response(
        status_code: int,
        msg: Any,
        error_code: Optional[str] = None,
        content_type: Optional[str] = DEFAULT_CONTENT_TYPE):
    """Wrap *msg* in an appropriate response object.

    str  -> JSON ``{"message": ...}`` (plus ``errorCode`` on non-2xx);
    bytes -> raw body with *content_type*;
    anything else -> JSON serialized as-is.
    """
    if isinstance(msg, str):
        response_obj = {
            'message': msg,
        }
        # Attach the error code only for non-2xx results.
        if not (200 <= status_code < 300) and error_code:
            response_obj['errorCode'] = error_code
        return JSONResponse(content=response_obj, status_code=status_code)
    if isinstance(msg, bytes):
        # BUG FIX: the original dropped status_code on this branch, so
        # byte payloads were always sent with a 200 status.
        return Response(content=msg, media_type=content_type,
                        status_code=status_code)
    return JSONResponse(content=msg, status_code=status_code)
async def sparql_query_post(request: Request):
    """Proxy a SPARQL POST query to the Oxigraph server and relay the reply."""
    url = f'{settings.OXIGRAPH_SERVER_URL}/query'
    # NOTE(review): the client's headers (including Host/Content-Length) are
    # forwarded verbatim to Oxigraph, and Oxigraph's response headers are
    # relayed back verbatim; that is only safe while neither side re-encodes
    # the body — confirm.
    headers = request.headers.items()
    body = await request.body()
    params = dict(request.query_params)
    async with httpx.AsyncClient() as client:
        # The annotations below are for editor/type-checker benefit only.
        r: httpx.Response
        client: httpx.AsyncClient
        r = await client.post(url=url, headers=headers, content=body, params=params)
        # Mirror status, headers, and declared content type from upstream.
        return Response(content=r.content,
                        status_code=r.status_code,
                        headers=r.headers,
                        media_type=r.headers.get('content-type'))
def _make_rdf_response(self, graph, mimetype=None, headers=None, delete_graph=True):
    """Serialize *graph* into a Response, optionally freeing the graph afterwards."""
    headers = self.headers if headers is None else headers
    body = graph.serialize(format=mimetype or "text/turtle")
    if delete_graph:
        # Destroy the triples in the triplestore, then delete the triplestore;
        # this helps to prevent a memory leak in rdflib.
        graph.store.remove((None, None, None))
        graph.destroy({})
        del graph
    return Response(body, media_type=mimetype, headers=headers)
async def delete_subscription_by_endpoint(
        endpoint: str,
        db: MSSQLConnection = Depends(get_db)) -> Response:
    """
    **Deletes a subscription with supplied endpoint parameter.**
    """
    # Perform deletion; the row count is currently unused (deletion is
    # treated as idempotent, so a missing row still yields 204).
    try:
        ret: int = await WebPushService(db).DeleteWebhook(endpoint)
    except InternalDatabaseError:
        raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR)
    return Response(status_code=status.HTTP_204_NO_CONTENT)
async def render_image(format: ExportFormat, agraph_info: _AGraphInfo = Depends(agraph)):
    """Render the graph with graphviz `dot` and return it as a file download.

    :raises HTTPException: 404 for an empty graph, 513 when dot exits
        non-zero, 504 when rendering exceeds the 10-second budget
    """
    if not agraph_info.nodes:
        raise HTTPException(404, dict(error="empty",
                                      msg="No nodes in graph",
                                      unknown_nodes=agraph_info.unknown_nodes))
    # Pipe the dot source through graphviz, capturing output and diagnostics.
    proc = await create_subprocess_exec('dot', '-T', format.value,
                                        stdin=asyncio.subprocess.PIPE,
                                        stdout=asyncio.subprocess.PIPE,
                                        stderr=asyncio.subprocess.PIPE)
    try:
        output, errors = await wait_for(
            proc.communicate(codecs.encode(agraph_info.graph.to_string())),
            timeout=10)
        if proc.returncode != 0:
            raise HTTPException(513, f'Rendering {format} failed:\n\n{codecs.decode(errors)}')
        return Response(output, media_type=MIME_TYPES[format], headers={
            'Content-Disposition': f'attachment; filename="{agraph_info.basename}.{format.value}"'})
    except asyncio.TimeoutError:
        proc.terminate()
        # BUG FIX: corrected the user-facing typo "to complex" -> "too complex".
        raise HTTPException(504, dict(error="timeout",
                                      msg="This layout is too complex to be rendered within the server's limits.\n"
                                          "Download a .dot file and run graphviz on your local machine to get a rendering."))