async def solve_dependencies(
    *,
    request: Request,
    dependant: Dependant,
    body: Optional[Dict[str, Any]] = None,
    background_tasks: Optional[BackgroundTasks] = None,
) -> Tuple[Dict[str, Any], List[ErrorWrapper], Optional[BackgroundTasks]]:
    """Recursively resolve a dependant's dependency tree for one request.

    Returns a tuple ``(values, errors, background_tasks)``: the keyword
    arguments to call the endpoint with, any validation errors collected
    along the way, and the (lazily created) shared BackgroundTasks.
    """
    values: Dict[str, Any] = {}
    errors: List[ErrorWrapper] = []
    for sub_dependant in dependant.dependencies:
        # Solve nested dependencies first; they may create/propagate the
        # shared BackgroundTasks instance.
        sub_values, sub_errors, background_tasks = await solve_dependencies(
            request=request,
            dependant=sub_dependant,
            body=body,
            background_tasks=background_tasks,
        )
        if sub_errors:
            # Don't call a dependency whose own inputs failed validation.
            errors.extend(sub_errors)
            continue
        assert sub_dependant.call is not None, "sub_dependant.call must be a function"
        if is_coroutine_callable(sub_dependant.call):
            solved = await sub_dependant.call(**sub_values)
        else:
            # Sync dependencies run in a threadpool so they don't block the loop.
            solved = await run_in_threadpool(sub_dependant.call, **sub_values)
        assert sub_dependant.name is not None, "Subdependants always have a name"
        values[sub_dependant.name] = solved
    # Extract and validate parameters from each part of the request.
    path_values, path_errors = request_params_to_args(
        dependant.path_params, request.path_params
    )
    query_values, query_errors = request_params_to_args(
        dependant.query_params, request.query_params
    )
    header_values, header_errors = request_params_to_args(
        dependant.header_params, request.headers
    )
    cookie_values, cookie_errors = request_params_to_args(
        dependant.cookie_params, request.cookies
    )
    values.update(path_values)
    values.update(query_values)
    values.update(header_values)
    values.update(cookie_values)
    errors += path_errors + query_errors + header_errors + cookie_errors
    if dependant.body_params:
        body_values, body_errors = await request_body_to_args(  # type: ignore # body_params checked above
            dependant.body_params, body
        )
        values.update(body_values)
        errors.extend(body_errors)
    # Inject framework-provided objects the endpoint asked for.
    if dependant.request_param_name:
        values[dependant.request_param_name] = request
    if dependant.background_tasks_param_name:
        if background_tasks is None:
            background_tasks = BackgroundTasks()
        values[dependant.background_tasks_param_name] = background_tasks
    if dependant.security_scopes_param_name:
        values[dependant.security_scopes_param_name] = SecurityScopes(
            scopes=dependant.security_scopes
        )
    return values, errors, background_tasks
async def end(self):
    """
    Ends given match: marks it inactive on the scoreboard, drops its map
    pool, and queues webhook + server-stop background tasks.

    Returns the match payload (or the error payload if lookup failed).
    """
    match = await self.get()
    if match.error:
        # Propagate lookup errors unchanged.
        return match

    values = {
        "match_id": self.match_id,
    }

    query = """UPDATE scoreboard_total SET status = 0 WHERE match_id = :match_id"""
    await self.current_league.obj.database.execute(query=query, values=values)

    # We just delete map pool for the given match.
    query = "DELETE FROM map_pool WHERE match_id = :match_id"
    await self.current_league.obj.database.execute(query=query, values=values)

    background_tasks = BackgroundTasks()
    league_details = await self.current_league.details()
    if not league_details.error:
        background_tasks.add_task(
            Webhook(uri=league_details["websocket_endpoint"], data=match.data).send)
    background_tasks.add_task(
        self.current_league.obj.server(server_id=match.data["server_id"]).stop)

    # BUG FIX: the keyword was misspelled "backgroud", so the queued tasks
    # were never attached to the response.
    return response(data=match.data, background=background_tasks)
def _tasks_from_event(self, event: Hashable, *args: Any, **kwargs: Any) -> BackgroundTasks:
    """Bundle every handler registered for *event* into one BackgroundTasks,
    each invoked with the same positional/keyword arguments."""
    bundled = BackgroundTasks()
    handlers = list(self._handlers[event].values())
    for handler in handlers:
        bundled.add_task(handler, *args, **kwargs)
    return bundled
async def _get_response(self, request: Request, data: QueryParams,
                        variables: typing.Optional[dict]) -> Response:
    """Execute the GraphQL query carried in *data* and build the HTTP
    response, attaching a BackgroundTasks resolvers can append to."""
    if "query" not in data:
        return PlainTextResponse("No GraphQL query found in the request", 400)
    query = data["query"]

    config = get_graphql_config(request)
    background = BackgroundTasks()
    context = {"req": request, "background": background, **config.context}

    engine: Engine = config.engine
    result: dict = await engine.execute(
        query,
        context=context,
        variables=variables,
        operation_name=data.get("operationName"),
    )

    content = {"data": result["data"]}
    if "errors" in result:
        content["errors"] = format_errors(result["errors"])
        status = 400
    else:
        status = 200

    return JSONResponse(content=content, status_code=status, background=background)
def predict(request: Any = Body(..., media_type="application/json"), debug=False):
    """Run the user predictor on the request payload and return a JSON response.

    Falls back to json_tricks encoding for values the stdlib can't serialize,
    and records tracker metrics best-effort (never failing the request).
    """
    api = local_cache["api"]
    predictor_impl = local_cache["predictor_impl"]

    debug_obj("payload", request, debug)
    prediction = predictor_impl.predict(request)
    debug_obj("prediction", prediction, debug)

    try:
        json_string = json.dumps(prediction)
    except Exception:
        # BUG FIX: was a bare `except:` — don't swallow SystemExit/KeyboardInterrupt.
        json_string = util.json_tricks_encoder().encode(prediction)

    response = Response(content=json_string, media_type="application/json")

    if api.tracker is not None:
        try:
            predicted_value = api.tracker.extract_predicted_value(prediction)
            api.post_tracker_metrics(predicted_value)
            if (api.tracker.model_type == "classification"
                    and predicted_value not in local_cache["class_set"]):
                # First sighting of this class: upload it in the background.
                tasks = BackgroundTasks()
                tasks.add_task(api.upload_class, class_name=predicted_value)
                local_cache["class_set"].add(predicted_value)
                response.background = tasks
        except Exception:
            # BUG FIX: was a bare `except:`. Metrics are best-effort; log and continue.
            cx_logger().warn("unable to record prediction metric", exc_info=True)

    return response
async def handle_graphql(self, request: Request) -> Response:
    """Serve a GraphQL request over GET (query string / GraphiQL IDE) or
    POST (JSON, raw GraphQL, or query string), returning a JSON response
    with a BackgroundTasks resolvers may use via the context."""
    if request.method in ("GET", "HEAD"):
        if "text/html" in request.headers.get("Accept", ""):
            # Browser asking for HTML: serve the GraphiQL IDE if enabled.
            if not self.graphiql:
                return PlainTextResponse(
                    "Not Found", status_code=status.HTTP_404_NOT_FOUND)
            return await self.handle_graphiql(request)
        data = request.query_params  # type: typing.Mapping[str, typing.Any]
    elif request.method == "POST":
        content_type = request.headers.get("Content-Type", "")
        if "application/json" in content_type:
            data = await request.json()
        elif "application/graphql" in content_type:
            body = await request.body()
            text = body.decode()
            data = {"query": text}
        elif "query" in request.query_params:
            data = request.query_params
        else:
            return PlainTextResponse(
                "Unsupported Media Type",
                status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
            )
    else:
        return PlainTextResponse(
            "Method Not Allowed",
            status_code=status.HTTP_405_METHOD_NOT_ALLOWED)
    try:
        query = data["query"]
        variables = data.get("variables")
        operation_name = data.get("operationName")
    except KeyError:
        return PlainTextResponse(
            "No GraphQL query found in the request",
            status_code=status.HTTP_400_BAD_REQUEST,
        )
    background = BackgroundTasks()
    context = {"request": request, "background": background}
    result = await self.execute(query,
                                variables=variables,
                                context=context,
                                operation_name=operation_name)
    error_data = ([format_error(err) for err in result.errors]
                  if result.errors else None)
    response_data = {"data": result.data}
    if error_data:
        response_data["errors"] = error_data
    status_code = (status.HTTP_400_BAD_REQUEST
                   if result.errors else status.HTTP_200_OK)
    # BUG FIX: removed stray `print(status_code, response_data)` debug
    # statement that leaked every response payload to stdout.
    return JSONResponse(response_data,
                        status_code=status_code,
                        background=background)
def predict(request: Any = Body(..., media_type="application/json"), debug=False):
    """Run the user predictor and return its JSON-serialized prediction.

    Raises UserRuntimeException when the prediction is not JSON
    serializable; tracker metrics are recorded best-effort.
    """
    api = local_cache["api"]
    predictor_impl = local_cache["predictor_impl"]

    debug_obj("payload", request, debug)
    prediction = predictor_impl.predict(request)

    try:
        json_string = json.dumps(prediction)
    except Exception as e:
        raise UserRuntimeException(
            f"the return value of predict() or one of its nested values is not JSON serializable",
            str(e),
        ) from e
    debug_obj("prediction", json_string, debug)

    response = Response(content=json_string, media_type="application/json")

    if api.tracker is not None:
        try:
            predicted_value = api.tracker.extract_predicted_value(prediction)
            api.post_tracker_metrics(predicted_value)
            if (api.tracker.model_type == "classification"
                    and predicted_value not in local_cache["class_set"]):
                # First sighting of this class: upload it in the background.
                tasks = BackgroundTasks()
                tasks.add_task(api.upload_class, class_name=predicted_value)
                local_cache["class_set"].add(predicted_value)
                response.background = tasks
        except Exception:
            # BUG FIX: was a bare `except:` — don't swallow SystemExit/KeyboardInterrupt.
            cx_logger().warn("unable to record prediction metric", exc_info=True)

    return response
async def handle_http_request(self, http_request: Request):
    """Parse and dispatch a JSON-RPC HTTP request, merging headers and
    status from the nested sub-response into the final response."""
    background_tasks = BackgroundTasks()
    # noinspection PyTypeChecker
    sub_response = Response(
        content=None,
        status_code=None,
        headers=None,
        media_type=None,
        background=None,
    )

    response = None
    try:
        body = await self.parse_body(http_request)
    except Exception as exc:
        error_payload, _ = await self.entrypoint.handle_exception_to_resp(exc)
        response = self.response_class(content=error_payload, background=background_tasks)

    if response is None:
        try:
            payload = await self.handle_body(http_request, background_tasks, sub_response, body)
        except NoContent:
            # Successful notifications carry no body.
            response = Response(media_type='application/json', background=background_tasks)
        else:
            response = self.response_class(content=payload, background=background_tasks)

    # Propagate anything the handler set on the sub-response.
    response.headers.raw.extend(sub_response.headers.raw)
    if sub_response.status_code:
        response.status_code = sub_response.status_code
    return response
async def resolve_get_user(user, obj, info):
    """Resolver that stashes two demo background tasks on the request state."""
    pending = BackgroundTasks()
    pending.add_task(test_func)
    pending.add_task(testing_func_two, "I work now")
    info.context["request"].state.background = pending
    return True
async def app(scope, receive, send):
    """Minimal ASGI app: responds with plain text, then runs two increments."""
    pending = BackgroundTasks()
    for _ in range(2):
        pending.add_task(increment)
    reply = Response("tasks initiated", media_type="text/plain", background=pending)
    await reply(scope, receive, send)
async def asgi(receive, send):
    """Two-argument ASGI callable that schedules three increment tasks."""
    pending = BackgroundTasks()
    for amount in (1, 2, 3):
        pending.add_task(increment, amount=amount)
    reply = Response("tasks initiated", media_type="text/plain", background=pending)
    await reply(receive, send)
async def background(request):
    """$ http :8000/background"""
    pending = BackgroundTasks()
    for value in ('spam', 'ham', 'eggs'):
        pending.add_task(_background, value=value)
    return JSONResponse({'background': 'logging'}, background=pending)
async def _get_context_value(self, request: HTTPConnection) -> Any:
    """Resolve the GraphQL context for *request*.

    A callable ``context_value`` is invoked (and awaited when it returns an
    awaitable); otherwise the configured value is returned as-is, falling
    back to a default dict with the request and a fresh BackgroundTasks.
    """
    if not callable(self.context_value):
        return self.context_value or {
            "request": request,
            "background": BackgroundTasks(),
        }
    context = self.context_value(request)
    if isawaitable(context):
        context = await context
    return context
def mail():
    """Send a test email over SMTP-SSL.

    BUG FIX: the original wrapped ``smtp.sendmail(...)`` in
    ``BackgroundTasks(tasks=[...])``, which executed sendmail *immediately*
    and stored its return value — not a deferred task — in a BackgroundTasks
    object that was never attached to any response. The wrapper did nothing,
    so the call is now made directly.

    SECURITY NOTE: credentials are hard-coded; move them to environment
    variables or a secrets manager.
    """
    with smtplib.SMTP_SSL("smtp.gmail.com", 465) as smtp:
        smtp.login("*****@*****.**", "ksbdcllupwjkgufk")
        subject = "owonikoko ennebee"
        body = "testing email aadress"
        msg = f"subject: {subject}\n\n{body}"
        smtp.sendmail("*****@*****.**", "*****@*****.**", msg)
    print("done sending")
async def background_jobs(request):
    """Respond immediately, then run one sync and one async demo job."""
    def sync_task():
        print("Doing the sync task ! ✨")

    async def async_task():
        print("Doing the async task ! 🎉")

    queued = BackgroundTasks()
    for job in (sync_task, async_task):
        queued.add_task(job)
    return PlainTextResponse("Triggering background jobs", background=queued)
async def register_dicom(request):
    """Endpoint for registering newly received DICOM files. Called by the getdcmtags module."""
    payload = dict(await request.form())
    query = dicom_files.insert().values(
        filename=payload.get("filename", ""),
        file_uid=payload.get("file_uid", ""),
        series_uid=payload.get("series_uid", ""),
        time=datetime.datetime.now(),
    )
    # Run the insert after the response has been sent.
    deferred = BackgroundTasks()
    deferred.add_task(execute_db_operation, operation=query)
    return JSONResponse({'ok': ''}, background=deferred)
async def background_jobs(request):
    """Respond immediately, then run one sync and one async no-op job."""
    def sync_noop():
        pass

    async def async_noop():
        pass

    queued = BackgroundTasks()
    for job in (sync_noop, async_noop):
        queued.add_task(job)
    return PlainTextResponse("Triggering background jobs", background=queued)
async def post_webgui_event(request):
    """Endpoint for logging relevant events of the webgui."""
    payload = dict(await request.form())
    query = webgui_events.insert().values(
        sender=payload.get("sender", "Unknown"),
        event=payload.get("event", monitor.w_events.UNKNOWN),
        user=payload.get("user", "UNKNOWN"),
        description=payload.get("description", ""),
        time=datetime.datetime.now(),
    )
    # Run the insert after the response has been sent.
    deferred = BackgroundTasks()
    deferred.add_task(execute_db_operation, operation=query)
    return JSONResponse({'ok': ''}, background=deferred)
async def post_mercure_event(request):
    """Endpoint for receiving mercure system events."""
    payload = dict(await request.form())
    query = mercure_events.insert().values(
        sender=payload.get("sender", "Unknown"),
        event=payload.get("event", monitor.h_events.UNKNOWN),
        severity=int(payload.get("severity", monitor.severity.INFO)),
        description=payload.get("description", ""),
        time=datetime.datetime.now(),
    )
    # Run the insert after the response has been sent.
    deferred = BackgroundTasks()
    deferred.add_task(execute_db_operation, operation=query)
    return JSONResponse({'ok': ''}, background=deferred)
def predict(request: Request):
    """Run the predictor (optionally through the dynamic batcher) and build
    a response whose media type matches the prediction's Python type:
    bytes -> octet-stream, str -> text/plain, Response -> passthrough,
    anything else -> JSON.

    Monitoring metrics and post_predict hooks are best-effort.
    """
    tasks = BackgroundTasks()
    api = local_cache["api"]
    predictor_impl = local_cache["predictor_impl"]
    dynamic_batcher = local_cache["dynamic_batcher"]
    kwargs = build_predict_kwargs(request)

    if dynamic_batcher:
        prediction = dynamic_batcher.predict(**kwargs)
    else:
        prediction = predictor_impl.predict(**kwargs)

    # Pick the response encoding from the prediction's type.
    if isinstance(prediction, bytes):
        response = Response(content=prediction, media_type="application/octet-stream")
    elif isinstance(prediction, str):
        response = Response(content=prediction, media_type="text/plain")
    elif isinstance(prediction, Response):
        response = prediction
    else:
        try:
            json_string = json.dumps(prediction)
        except Exception as e:
            raise UserRuntimeException(
                str(e),
                "please return an object that is JSON serializable (including its nested fields), a bytes object, "
                "a string, or a starlette.response.Response object",
            ) from e
        response = Response(content=json_string, media_type="application/json")

    if local_cache["provider"] != "local" and api.monitoring is not None:
        try:
            predicted_value = api.monitoring.extract_predicted_value(prediction)
            api.post_monitoring_metrics(predicted_value)
            if (api.monitoring.model_type == "classification"
                    and predicted_value not in local_cache["class_set"]):
                tasks.add_task(api.upload_class, class_name=predicted_value)
                local_cache["class_set"].add(predicted_value)
        except Exception:
            # BUG FIX: was a bare `except:` — don't swallow SystemExit/KeyboardInterrupt.
            logger().warn("unable to record prediction metric", exc_info=True)

    if util.has_method(predictor_impl, "post_predict"):
        kwargs = build_post_predict_kwargs(prediction, request)
        request_thread_pool.submit(predictor_impl.post_predict, **kwargs)

    if len(tasks.tasks) > 0:
        response.background = tasks

    return response
async def handle_http_request(self, http_request: Request):
    """Parse and dispatch a JSON-RPC HTTP request."""
    background_tasks = BackgroundTasks()
    try:
        body = await self.parse_body(http_request)
    except Exception as exc:
        error_payload = await self.entrypoint.handle_exception_to_resp(exc)
        return self.response_class(content=error_payload, background=background_tasks)
    try:
        payload = await self.handle_body(http_request, background_tasks, body)
    except NoContent:
        # Successful notifications produce no content.
        return Response(media_type='application/json', background=background_tasks)
    return self.response_class(content=payload, background=background_tasks)
async def signup(request):
    """Handle a signup POST, queuing welcome/admin emails behind the response."""
    print(request.method)
    if request.method == 'POST':
        data = await request.json()
        if not data:
            # Empty body: fall back to placeholder values.
            username = '******'
            email = 'email'
        else:
            username = data['username']
            email = data['email']
        queued = BackgroundTasks()
        queued.add_task(send_welcome_email, to_address=email)
        queued.add_task(send_admin_notification, username=username)
        message = {'status': 'Signup successful'}
        return JSONResponse(message, background=queued)
async def post_series_event(request):
    """Endpoint for logging all events related to one series."""
    payload = dict(await request.form())
    query = series_events.insert().values(
        sender=payload.get("sender", "Unknown"),
        event=payload.get("event", monitor.s_events.UNKNOWN),
        series_uid=payload.get("series_uid", ""),
        file_count=payload.get("file_count", 0),
        target=payload.get("target", ""),
        info=payload.get("info", ""),
        time=datetime.datetime.now(),
    )
    # Run the insert after the response has been sent.
    deferred = BackgroundTasks()
    deferred.add_task(execute_db_operation, operation=query)
    return JSONResponse({'ok': ''}, background=deferred)
async def build_response(
    self,
    coroutine: typing.Awaitable,
    status_code: int = 400,
    no_db=False,
    redirect=False,
    redirect_key=None,
) -> typing.Union[JSONResponse, RedirectResponse]:
    """Await *coroutine* and translate its SResult into an HTTP response.

    - result.errors -> 400 JSON ``{"status": False, **errors}``
    - redirect requested and data present -> 301 redirect to
      ``result.data[redirect_key]``
    - otherwise -> JSON ``{"status": True, "data": ...}`` with any queued
      background tasks attached.

    Task entries may be bare callables or sequences of the form
    ``(callable, *args, {kwargs})``.
    """
    if self.is_serverless and not no_db:
        await self.connect_db()
    result: SResult = await coroutine
    tasks = BackgroundTasks()

    if result.errors:
        return self.json_response(
            {
                "status": False,
                **result.errors
            },
            status_code=400,
            tasks=tasks,
            no_db=no_db,
        )

    if result.task:
        for entry in result.task:
            # IDIOM FIX: isinstance instead of `type(i) in [list, tuple]`
            # (also accepts list/tuple subclasses, as type checks should).
            if isinstance(entry, (list, tuple)):
                try:
                    # A dict element in the sequence carries keyword args;
                    # everything before it is positional.
                    dict_index = [type(o) for o in entry].index(dict)
                    tasks.add_task(*entry[0:dict_index], **entry[dict_index])
                except ValueError:
                    # No dict present: all positional.
                    tasks.add_task(*entry)
            else:
                tasks.add_task(entry)

    if redirect and redirect_key and result.data:
        redirect_url = result.data.get(redirect_key)
        return self.json_response(redirect_url, redirect=True, status_code=301)

    _result: typing.Dict[str, typing.Any] = {"status": True}
    if result.data:
        _result.update(data=result.data)
    return self.json_response(_result, tasks=tasks, no_db=no_db)
async def grade_batch(request: Request):
    """
    responses:
      200:
        description: Return job ids
        examples:
          {"jobs": ['id1', 'id2', ...]}
    """
    request_data = await request.json()
    logging.info("received batch request")
    _debug_print_json(request_data)

    background_job = BackgroundTasks()
    job_ids = []
    for submission in request_data["subm_ids"]:
        job_id = uuid4().hex
        # Mark the job queued up-front so status polling works immediately.
        app.db[f"job_status_{job_id}"] = "QUEUED"
        background_job.add_task(
            kick_off_grading_job,
            assignment_token=request_data["assignment"],
            submission_id=submission,
            access_token=request_data["access_token"],
            job_id=job_id,
        )
        job_ids.append(job_id)

    # NOTE(review): uses the *last* job_id for the job name and a hard-coded
    # api_addr; also raises NameError if subm_ids is empty — confirm intent.
    create_grading_job(
        "GradingJobConfig.yml",
        {
            "name": f"pandas-grader-{job_id}",
            "parallelism": 50,
            "num_jobs": len(job_ids),
            "api_addr": "http://52.10.157.13:8000",
        },
    )
    # IDIOM FIX: no need to copy job_ids through an identity comprehension.
    return JSONResponse({"jobs": job_ids}, background=background_job)
def predict(request: Any = Body(..., media_type="application/json"), debug=False):
    """Run the user predictor and build a response typed by the prediction:
    bytes -> octet-stream, str -> text/plain, Response -> passthrough,
    anything else -> JSON. Tracker metrics are recorded best-effort.
    """
    api = local_cache["api"]
    predictor_impl = local_cache["predictor_impl"]

    debug_obj("payload", request, debug)
    prediction = predictor_impl.predict(request)

    if isinstance(prediction, bytes):
        response = Response(content=prediction, media_type="application/octet-stream")
    elif isinstance(prediction, str):
        response = Response(content=prediction, media_type="text/plain")
        debug_obj("prediction", prediction, debug)
    elif isinstance(prediction, Response):
        response = prediction
    else:
        try:
            json_string = json.dumps(prediction)
            debug_obj("prediction", prediction, debug)
        except Exception as e:
            raise UserRuntimeException(
                str(e),
                "please return an object that is JSON serializable (including its nested fields), a bytes object, a string, or a starlette.response.Response object",
            ) from e
        response = Response(content=json_string, media_type="application/json")

    if local_cache["provider"] != "local" and api.tracker is not None:
        try:
            predicted_value = api.tracker.extract_predicted_value(prediction)
            api.post_tracker_metrics(predicted_value)
            if (
                api.tracker.model_type == "classification"
                and predicted_value not in local_cache["class_set"]
            ):
                # First sighting of this class: upload it in the background.
                tasks = BackgroundTasks()
                tasks.add_task(api.upload_class, class_name=predicted_value)
                local_cache["class_set"].add(predicted_value)
                response.background = tasks
        except Exception:
            # BUG FIX: was a bare `except:` — don't swallow SystemExit/KeyboardInterrupt.
            cx_logger().warn("unable to record prediction metric", exc_info=True)

    return response
async def analyze(request):
    """Classify an uploaded image; persist the file and metadata in the background."""
    learn = load_learner(path / "models")
    form = await request.form()
    raw = await form["file"].read()
    img = open_image(BytesIO(raw))
    extension = form["file"].filename.split(".")[-1]
    filename = f"{str(uuid4())}.{extension}"

    prediction = learn.predict(img)
    score = prediction[2].data[prediction[1].item()].item()
    # Low-confidence predictions are reported as "not red flower".
    pred_str = str(prediction[0]) if score > 0.8 else "not red flower"

    res = {
        "filename": filename,
        "prediction": pred_str,
        "score": score,
    }
    deferred = BackgroundTasks()
    if not test:
        deferred.add_task(store, f=BytesIO(raw), filename=filename)
        deferred.add_task(write_metadata, payload=res)  # TODO: add score
    return JSONResponse(res, background=deferred)
async def post(request):
    """Handle form request."""
    data = dict(request.state.payload)
    # Honeypot field filled in -> silently drop the submission (bot).
    if data.pop('_honey', None):
        return

    meta = {
        'form': data.pop('_form', None),
        'user_id': data.pop('_user_id', None),
        'user_ip': request.headers.get('X-Real-Ip'),
        'user_agent': request.headers.get('User-Agent'),
        'referer': data.pop('_referer', request.headers.get('Referer')),
        'redirect': data.pop('_redirect', settings.DEFAULT_REDIRECT),
        'webhook': data.pop('_webhook', settings.DEFAULT_WEBHOOK),
    }
    # Note: every '_'-prefixed control field is popped out of `data`
    # before prepare_text() renders the remaining user fields.
    email = {
        'from': settings.SMTP_FROM,
        'to': data.pop('_to', settings.DEFAULT_TO),
        'reply_to': data.pop('_reply_to', data.get('email')),
        'cc': data.pop('_cc', settings.DEFAULT_CC),
        'bcc': data.pop('_bcc', settings.DEFAULT_BCC),
        'subject': data.pop('_subject', settings.DEFAULT_SUBJECT),
        'text': prepare_text(data),
    }
    config = {'meta': meta, 'email': email, 'data': data}

    background = BackgroundTasks()
    background.add_task(tasks.send_email, config=config)
    background.add_task(tasks.send_webhook, config=config)
    return RedirectResponse(meta['redirect'], 303, background=background)
async def run_bg_task(request):
    """Respond immediately, then run one async and one sync demo task."""
    queued = BackgroundTasks()
    for job in (bg_task_async, bg_task_non_async):
        queued.add_task(job)
    return PlainTextResponse("Hello, world!", background=queued)
async def solve_dependencies(
    *,
    request: Union[Request, WebSocket],
    dependant: Dependant,
    body: Optional[Union[Dict[str, Any], FormData]] = None,
    background_tasks: Optional[BackgroundTasks] = None,
    response: Optional[Response] = None,
    dependency_overrides_provider: Optional[Any] = None,
    dependency_cache: Optional[Dict[Tuple[Callable[..., Any], Tuple[str]], Any]] = None,
) -> Tuple[Dict[str, Any], List[ErrorWrapper], Optional[BackgroundTasks], Response,
           Dict[Tuple[Callable[..., Any], Tuple[str]], Any], ]:
    """Recursively resolve *dependant*'s dependency tree for one request.

    Returns ``(values, errors, background_tasks, response, dependency_cache)``:
    the keyword arguments to call the endpoint with, validation errors
    collected along the way, the shared BackgroundTasks (created lazily),
    the shared sub-Response, and the per-request cache of solved
    dependencies.
    """
    values: Dict[str, Any] = {}
    errors: List[ErrorWrapper] = []
    # A throwaway Response shared by the whole tree so dependencies can set
    # headers/status that get merged into the real response later.
    response = response or Response(
        content=None,
        status_code=None,  # type: ignore
        headers=None,  # type: ignore # in Starlette
        media_type=None,  # type: ignore # in Starlette
        background=None,  # type: ignore # in Starlette
    )
    dependency_cache = dependency_cache or {}
    sub_dependant: Dependant
    for sub_dependant in dependant.dependencies:
        sub_dependant.call = cast(Callable[..., Any], sub_dependant.call)
        sub_dependant.cache_key = cast(Tuple[Callable[..., Any], Tuple[str]],
                                       sub_dependant.cache_key)
        call = sub_dependant.call
        use_sub_dependant = sub_dependant
        if (dependency_overrides_provider
                and dependency_overrides_provider.dependency_overrides):
            # Testing hook: swap the dependency for its registered override
            # and re-analyze the replacement's own signature.
            original_call = sub_dependant.call
            call = getattr(dependency_overrides_provider, "dependency_overrides",
                           {}).get(original_call, original_call)
            use_path: str = sub_dependant.path  # type: ignore
            use_sub_dependant = get_dependant(
                path=use_path,
                call=call,
                name=sub_dependant.name,
                security_scopes=sub_dependant.security_scopes,
            )
            use_sub_dependant.security_scopes = sub_dependant.security_scopes
        solved_result = await solve_dependencies(
            request=request,
            dependant=use_sub_dependant,
            body=body,
            background_tasks=background_tasks,
            response=response,
            dependency_overrides_provider=dependency_overrides_provider,
            dependency_cache=dependency_cache,
        )
        (
            sub_values,
            sub_errors,
            background_tasks,
            _,  # the subdependency returns the same response we have
            sub_dependency_cache,
        ) = solved_result
        dependency_cache.update(sub_dependency_cache)
        if sub_errors:
            # Don't call a dependency whose own inputs failed validation.
            errors.extend(sub_errors)
            continue
        if sub_dependant.use_cache and sub_dependant.cache_key in dependency_cache:
            # Reuse the value already solved earlier in this same request.
            solved = dependency_cache[sub_dependant.cache_key]
        elif is_gen_callable(call) or is_async_gen_callable(call):
            # Generator dependencies need the request's exit stack so their
            # teardown (code after `yield`) runs when the request finishes.
            stack = request.scope.get("fastapi_astack")
            if stack is None:
                raise RuntimeError(async_contextmanager_dependencies_error
                                   )  # pragma: no cover
            solved = await solve_generator(call=call, stack=stack, sub_values=sub_values)
        elif is_coroutine_callable(call):
            solved = await call(**sub_values)
        else:
            # Plain sync dependencies run in a threadpool.
            solved = await run_in_threadpool(call, **sub_values)
        if sub_dependant.name is not None:
            values[sub_dependant.name] = solved
        if sub_dependant.cache_key not in dependency_cache:
            dependency_cache[sub_dependant.cache_key] = solved
    # Extract and validate parameters from each part of the request.
    path_values, path_errors = request_params_to_args(dependant.path_params,
                                                      request.path_params)
    query_values, query_errors = request_params_to_args(
        dependant.query_params, request.query_params)
    header_values, header_errors = request_params_to_args(
        dependant.header_params, request.headers)
    cookie_values, cookie_errors = request_params_to_args(
        dependant.cookie_params, request.cookies)
    values.update(path_values)
    values.update(query_values)
    values.update(header_values)
    values.update(cookie_values)
    errors += path_errors + query_errors + header_errors + cookie_errors
    if dependant.body_params:
        (
            body_values,
            body_errors,
        ) = await request_body_to_args(  # body_params checked above
            required_params=dependant.body_params, received_body=body)
        values.update(body_values)
        errors.extend(body_errors)
    # Inject framework-provided objects the endpoint asked for.
    if dependant.http_connection_param_name:
        values[dependant.http_connection_param_name] = request
    if dependant.request_param_name and isinstance(request, Request):
        values[dependant.request_param_name] = request
    elif dependant.websocket_param_name and isinstance(request, WebSocket):
        values[dependant.websocket_param_name] = request
    if dependant.background_tasks_param_name:
        if background_tasks is None:
            background_tasks = BackgroundTasks()
        values[dependant.background_tasks_param_name] = background_tasks
    if dependant.response_param_name:
        values[dependant.response_param_name] = response
    if dependant.security_scopes_param_name:
        values[dependant.security_scopes_param_name] = SecurityScopes(
            scopes=dependant.security_scopes)
    return values, errors, background_tasks, response, dependency_cache