def sent_recovery_password_email(email: str, background_task: BackgroundTasks) -> responses.EmailMsg:
    """Verify that the email belongs to an active user and send a recovery password email.

    :param email: address to look up and notify
    :param background_task: FastAPI background-task context used to send the mail
    :return: confirmation payload for the client

    Fixed: the return annotation was ``any`` (the builtin function, not a
    type); it now matches the actual return value.
    """
    # The service validates the address / active-user status and yields a token.
    token = userService.get_token_recovery_password(email)
    # Send the mail out-of-band so the HTTP response is not blocked on SMTP.
    background_task.add_task(
        send_recovery_password_email,
        token=token,
        email=email,
    )
    return responses.EmailMsg(detail="Password recovery email sent")
async def signin(
    credentials: Credentials,
    request: Request,
    background_tasks: BackgroundTasks,
) -> SignInRes:
    """Generate a new token if the provided identifier and password are correct.

    :param credentials: identifier/password pair supplied by the client
    :param request: incoming request, forwarded to the inner signin call
    :param background_tasks: used to schedule token cleanup after the response
    :return: wrapped signin response
    """
    # NOTE(review): `self` is referenced here but is not a parameter —
    # presumably this handler is bound inside a class-based view/router;
    # confirm against the enclosing definition.
    # Stale tokens are purged out-of-band after the response is sent.
    background_tasks.add_task(self.cleanup_tokens)
    return SignInRes(response=await self.signin(
        request=request,
        password=credentials.password,
        identifier=credentials.identifier,
    ))
def buy_product(pid: int, cid: int, num: int, background_tasks: BackgroundTasks):
    """Invoke the buy-product transaction out-of-band.

    :param pid: product id
    :param cid: customer id
    :param num: product num
    :param background_tasks: bg context
    :return: an ok response acknowledging the invocation
    """
    manager = trans_service.get_manager()
    # The transaction runs after the response; we only acknowledge submission.
    background_tasks.add_task(
        manager.call,
        MarketTransCode.BUY_PRODUCT,
        pid,
        cid,
        num,
    )
    return Resp.ok(message='invoked buy product transaction')
def notes_post(
    entry_id,
    data: M.SINote,
    background_tasks: BackgroundTasks,
    as_user: str = None,
    viewer: M.User = Depends(fastapi_users.get_current_user),
):
    """Create a note on an entry and return the notes visible to the viewer."""
    user, snooping = getuser(viewer, as_user)
    # TODO handle snooping
    note = M.Note(user_id=viewer.id, entry_id=entry_id, **data.dict())
    db.session.add(note)
    db.session.commit()
    # Record a "notes" feature-usage analytics event out-of-band.
    background_tasks.add_task(ga, viewer.id, 'feature', 'notes')
    return M.Note.snoop(viewer.id, user.id, entry_id).all()
def post_file_to_channel( background_tasks: BackgroundTasks, files: List[UploadFile] = File(...), force: Optional[bool] = Form(None), channel: db_models.Channel = Depends( ChannelChecker(allow_proxy=False, allow_mirror=False) ), dao: Dao = Depends(get_dao), auth: authorization.Rules = Depends(get_rules), ): handle_package_files(channel.name, files, dao, auth, force) # Background task to update indexes background_tasks.add_task(indexing.update_indexes, dao, pkgstore, channel.name)
async def post(
    model: create_model, background_tasks: BackgroundTasks  # type: ignore
):
    """Create a row from the posted model and run the optional "post" hook.

    Constraint violations from the database surface as HTTP 422.
    """
    try:
        obj = await orm_model.create(**model.dict())  # type: ignore
    except (
        asyncpg.exceptions.UniqueViolationError,
        asyncpg.exceptions.NotNullViolationError,
        asyncpg.exceptions.ForeignKeyViolationError,
    ) as exc:
        raise HTTPException(422, exc.message)
    hook = background_tasks_mapping.get("post")
    if hook:
        # Registered post-create hook runs after the response is sent.
        background_tasks.add_task(hook, obj)
    return obj
def scan_github_project(
    payload: models.GitHubPushPayload,
    background_task: BackgroundTasks,
    db: Session = Depends(dependencies.get_db),
):
    """Kick off a background analysis of a project after a GitHub push webhook.

    :param payload: parsed GitHub push webhook payload
    :param background_task: FastAPI background-task context
    :param db: database session handed through to the analysis task
    :return: acknowledgement message for the webhook caller
    """
    background_task.add_task(
        project_analysis,
        project_name=payload.repository.full_name,
        # Branch name is derived from the push ref (e.g. refs/heads/<name>).
        analysis=schemas.AnalysisBase(
            branch_name=get_branch_name(payload.ref)),
        scan_origin="github-webhook",
        db=db,
    )
    return {"message": "Notification sent in the background"}
async def root(word: str, background_task: BackgroundTasks):
    """Queue a Celery test task for *word* and monitor it in the background.

    :param word: payload forwarded to the Celery task
    :param background_task: FastAPI background-task context
    :return: acknowledgement message

    Fixed: removed the dead ``task_name = None`` initialization and the
    redundant ``bool()`` around ``os.getenv``.
    """
    # The worker's module path differs between Docker and bare-metal runs.
    if os.getenv('DOCKER'):
        task_name = "app.app.worker.celery_worker.test_celery"
    else:
        task_name = "app.worker.celery_worker.test_celery"
    task = celery_app.send_task(task_name, args=[word])
    print(task)
    # Watch for the task's result message after the response is sent.
    background_task.add_task(background_on_message, task)
    return {"message": "Word received"}
def create_task(request: Request, task: TaskRequest, background_task: BackgroundTasks):
    """Create an asynchronous spider task.

    Request parameters:
        taskType: operation type (only "create" is supported)
        taskArgs: task payload carrying spiderName/username/password

    Notes:
        0. This endpoint should be rate-limited.
        1. Uses FastAPI's BackgroundTask to hand off the task info.
        2. The frontend polls checkTaskStatus after receiving this response.
        3. The background task pushes the job onto the process queue.

    Fixed: replaced ``isinstance(...) is not True`` with ``not isinstance``
    and dropped the redundant ``is None`` check (None fails isinstance).
    """
    if task.taskType != "create":
        return error_response(message="ErrorTaskType")
    # Parameter validation: taskArgs must be a dict with the spider config.
    task_args: Optional[Dict] = task.taskArgs
    if not isinstance(task_args, dict):
        return error_response(message="ErrorTaskArgs")
    spider_name: Optional[str] = task_args.get("spiderName")
    if spider_name is None:
        return error_response(message="ErrorTaskArgs")
    spider_name = spider_name.lower()
    if spider_name not in SPIDER_SUPPORT_LIST:
        return error_response(message="ErrorTaskArgs - spiderName is Error")
    spider_username: Optional[str] = task_args.get("username")
    if spider_username is None:
        return error_response(message="ErrorTaskArgs")
    spider_password: Optional[str] = task_args.get("password")
    if spider_password is None:
        return error_response(message="ErrorTaskArgs")
    # Task id is derived from spider + credentials so duplicates collide.
    task_id: str = md5_str(
        encrypt_str=f"{spider_name}-*-{spider_username}-*-{spider_password}")
    task_result = app_redis_handler.find_key(key=f"spider_task:{task_id}")
    if task_result is None or task_result == str(PROCESS_STATUS_FAIL):
        # Submit the async job via BackgroundTask; client polls for status.
        background_task.add_task(submit_async_task, task_id, spider_name,
                                 spider_username, spider_password)
        return success_response(data=task_id)
    return error_response(
        message="Task is Exist! Do not duplicate create!")
async def submit(
    cls,
    *,
    background_tasks: BackgroundTasks,
    celery_app: Celery,
    problem_submit: ProblemSolutionSubmit,
    problem_set: Optional["ProblemSet"],
    problem: "Problem",
    user: "******",
) -> "Record":
    """Create a Record for a new submission and queue its upload for judging.

    Raises:
        BizError(ProblemConfigNotFoundError): if the problem has no config.
        BizError(Error): if an archive submission carries no file.
    """
    problem_config = await problem.get_latest_problem_config()
    if problem_config is None:
        raise BizError(ErrorCode.ProblemConfigNotFoundError)
    # Archive-type submissions must carry an uploaded file.
    if (problem_submit.code_type == RecordCodeType.archive
            and problem_submit.file is None):
        raise BizError(ErrorCode.Error)
    problem_set_id = problem_set.id if problem_set else None
    record = cls(
        domain_id=problem.domain_id,
        problem_set_id=problem_set_id,
        problem_id=problem.id,
        problem_config_id=problem_config.id,
        committer_id=user.id,
    )
    # Save the record without committing, bump the submit counter, then
    # commit both together and refresh the record from the database.
    await record.save_model(commit=False, refresh=False)
    problem.num_submit += 1
    await problem.save_model(commit=True, refresh=True)
    await record.refresh_model()
    # Cache a lightweight preview of the user's latest record in Redis.
    key = cls.get_user_latest_record_key(problem_set_id, problem.id, user.id)
    value = RecordPreview(id=record.id, state=record.state,
                          created_at=record.created_at)
    cache = get_redis_cache()
    await cache.set(key, value, namespace="user_latest_records")
    # The actual upload/judge happens out-of-band after the response.
    background_tasks.add_task(
        record.upload,
        celery_app=celery_app,
        problem_submit=problem_submit,
        problem=problem,
    )
    return record
async def update_version(
    *,
    dataset: str = Depends(dataset_dependency),
    version: str = Depends(version_dependency),
    request: VersionUpdateIn,
    background_tasks: BackgroundTasks,
    is_authorized: bool = Depends(is_admin),
):
    """Partially update a version of a given dataset.

    When using PATCH and uploading files, this will overwrite the existing
    source(s) and trigger a complete update of all managed assets.
    """
    input_data = request.dict(exclude_none=True, by_alias=True)
    if "source_uri" in input_data:
        curr_version: ORMVersion = await versions.get_version(dataset, version)
        # if curr_version.is_mutable:
        # append
        input_data["creation_options"] = curr_version.creation_options  # use same creation options for append
        input_data["source_type"] = curr_version.source_type  # use same default asset type
        assets: List[ORMAsset] = await get_assets(dataset, version)
        # Find the default asset for the current source type.
        # NOTE(review): if no asset matches, `default_asset` is never bound
        # and the add_task call below raises NameError — confirm a default
        # asset always exists for a version with sources.
        for asset in assets:
            if asset.asset_type == default_asset_type(curr_version.source_type):
                default_asset: ORMAsset = asset
        background_tasks.add_task(append_default_asset, dataset, version,
                                  input_data, default_asset.asset_id)
        # Append the new source URIs to the existing ones before updating.
        version_update_data = deepcopy(input_data)
        version_update_data["source_uri"] += curr_version.source_uri
        row: ORMVersion = await versions.update_version(dataset, version,
                                                        **version_update_data)
        # else:
        #     # overwrite
        #     raise HTTPException(
        #         status_code=501,
        #         detail="Not supported."
        #         "Overwriting version sources is not supported",
        #     )
    else:
        row: ORMVersion = await versions.update_version(dataset, version,
                                                        **input_data)
    return await _version_response(dataset, version, row)
async def handle_event(
    event: EventEnvelope,
    request: Request,
    response: Response,
    background_tasks: BackgroundTasks,
    x_slack_request_timestamp: int = Header(None),
    x_slack_signature: str = Header(None),
    db_session: Session = Depends(get_db),
):
    """Handle all incoming Slack events."""
    raw_request_body = bytes.decode(await request.body())
    # We verify the timestamp
    verify_timestamp(x_slack_request_timestamp)
    # We verify the signature
    verify_signature(raw_request_body, x_slack_request_timestamp, x_slack_signature)
    # Echo the URL verification challenge code back to Slack
    if event.challenge:
        return {"challenge": event.challenge}
    event_body = event.event
    if (
        event_body.type == "message" and event_body.subtype
    ):  # We ignore messages that have a subtype
        # Parse the Event payload and emit the event to the event listener
        response.headers["X-Slack-Powered-By"] = create_ua_string()
        # NOTE(review): {"ok"} is a set literal, not a dict — confirm this is
        # the intended response body (it appears twice in this handler).
        return {"ok"}
    user_id = event_body.user
    channel_id = get_channel_id(event_body)
    conversation = get_by_channel_id(db_session=db_session, channel_id=channel_id)
    if conversation and dispatch_slack_service.is_user(user_id):
        # We create an async Slack client
        slack_async_client = dispatch_slack_service.create_slack_client(run_async=True)
        # We resolve the user's email
        user_email = await dispatch_slack_service.get_user_email_async(slack_async_client, user_id)
        # Dispatch event functions to be executed in the background
        for f in event_functions(event):
            background_tasks.add_task(f, user_email, conversation.incident_id, event=event)
    # We add the user-agent string to the response headers
    response.headers["X-Slack-Powered-By"] = create_ua_string()
    return {"ok"}
def create_fetcher_settings(
    request: pyd_models.FetcherSettings,
    background_tasks: BackgroundTasks,
    db: Session = Depends(get_db),
):
    """Persist new fetcher settings and return the resulting configuration."""
    new_settings = frontier.set_fetcher_settings(request, db)
    # When fqdn hashing is enabled, refresh the hash table out-of-band.
    if database.fqdn_hash_activated(db):
        background_tasks.add_task(database.refresh_fqdn_hashes, db)
    return new_settings
def delete_eks_cluster(
    cluster_name: str,
    region: str,
    background_tasks: BackgroundTasks,
    nodegroup_names: List[str] = Body(..., example=[]),
) -> dict:
    """Schedule deletion of an EKS cluster together with its nodegroups.

    :param cluster_name: name of the cluster to delete
    :param region: AWS region hosting the cluster
    :param background_tasks: FastAPI background-task context
    :param nodegroup_names: nodegroups removed alongside the cluster
    :return: acknowledgement message
    """
    api = EKSApi(region=region)
    # Deletion is slow, so it runs after the response is returned.
    background_tasks.add_task(
        api.delete,
        cluster_name=cluster_name,
        nodegroup_names=nodegroup_names,
    )
    groups = ', '.join(nodegroup_names)
    return {
        "message": f"EKS delete request submitted for {cluster_name} cluster"
                   f" and nodegroups: {groups}"
    }
async def receive_token(token: int, background_tasks: BackgroundTasks, request: Request):
    """Accept a token from a peer node, record it, and schedule follow-up work.

    :param token: round token received from the calling node
    :param background_tasks: FastAPI background-task context
    :param request: used only to log the caller's host
    :return: acknowledgement status
    """
    logging.info(f'Received token {token} from {request.client.host}')
    # Record the token as the current round value.
    Globals.round = token
    # Notify subscribers that a token arrived.
    notifier.raise_event("received_token", token=token)
    # Slow down the communication a bit
    await asyncio.sleep(1)
    background_tasks.add_task(bgtask)
    return {"status": "token received"}
def handle_modal_action(action: dict, background_tasks: BackgroundTasks):
    """Dispatch a Slack modal submission to its registered handler functions."""
    view = action["view"]
    # private_metadata arrives JSON-encoded; decode it in place.
    view["private_metadata"] = json.loads(view["private_metadata"])
    metadata = view["private_metadata"]
    incident_id = metadata.get("incident_id")
    channel_id = metadata.get("channel_id")
    user = action["user"]
    # Each handler registered for this callback runs after the response.
    for handler in action_functions(view["callback_id"]):
        background_tasks.add_task(handler, user["id"], user["email"], channel_id, incident_id, action)
def send_email_backround(background_tasks: BackgroundTasks,
                         subject: str,
                         recipients: list[EmailStr],
                         template: str,
                         body: dict,
                         attachments: Optional[list[dict]] = None):
    """Queue an HTML templated email to be sent in the background.

    :param background_tasks: FastAPI background-task context
    :param subject: email subject line
    :param recipients: destination addresses
    :param template: template file name passed to FastMail
    :param body: values substituted into the template
    :param attachments: optional attachment descriptors (defaults to none)

    Fixed: the original used a mutable default argument (``attachments=[]``),
    which is shared across calls; ``None`` is normalized to a fresh list.
    """
    if attachments is None:
        attachments = []
    message = MessageSchema(subject=subject,
                            recipients=recipients,
                            template_body=body,
                            subtype='html',
                            attachments=attachments)
    mailer = FastMail(_conf)
    background_tasks.add_task(mailer.send_message, message,
                              template_name=template)
async def sync(request: Request,
               sync_type: SyncTypeSet,
               background_tasks: BackgroundTasks,
               db: Session = Depends(get_db),
               args: Optional[List[str]] = Query([]),
               xargs_keys: Optional[List[str]] = Query([]),
               xargs_vals: Optional[List[str]] = Query([])):
    """ Sync the given sync type """
    # Extra keyword arguments arrive as two parallel query lists.
    extra_kwargs = dict(zip(xargs_keys, xargs_vals))
    job = Sync(sync_type, db, request, *args, **extra_kwargs)
    # The sync runs after the response; the client polls by job id.
    background_tasks.add_task(job.start)
    return ORJSONResponse(content=dict(job_id=job.job_id))
async def stop_cron(
    signin_request: SignIn,
    background_tasks: BackgroundTasks,
    db: Session = Depends(get_db),
):
    """Stop the cron job if the supplied credentials match an existing user.

    Returns {"code": "success"} on a match, {"code": "failed"} otherwise.
    """
    # NOTE(review): this compares the password verbatim in the query —
    # passwords appear to be stored/compared in plain text; they should be
    # hashed and verified instead.
    user = (
        db.query(User)
        .filter(User.username == signin_request.username)
        .filter(User.password == signin_request.password)
        .first()
    )
    if user is None:
        return {"code": "failed"}
    background_tasks.add_task(stop_cronjob)
    return {"code": "success"}
async def process_upload(
    task: BackgroundTasks,
    x_neo4j_user: str = Header("neo4j"),
    x_neo4j_pass: str = Header("password"),
    upload: UploadFile = File(...),
):
    """Accept a file upload and hand it to sshandler.process in the background.

    Neo4j credentials are taken from custom request headers (with defaults).
    """
    # Flush the spooled upload to a real on-disk file before handing it off.
    upload.file.rollover()
    # NOTE(review): `_file` is a private attribute of SpooledTemporaryFile —
    # presumably used to pass the underlying file object; fragile across
    # Python versions, confirm.
    task.add_task(
        sshandler.process,
        upload.file._file,
        upload.filename,
        x_neo4j_user,
        x_neo4j_pass,
    )
    return {"status": "Upload Success"}
async def set_endpoint(
    background_tasks: BackgroundTasks,
    endpoint: Endpoint,
    current_user: User = Depends(auth.get_current_user),
):
    """Save or update the bot endpoint configuration."""
    config = endpoint.dict()
    mongo_processor.add_endpoints(config, current_user.get_bot(), current_user.get_user())
    if endpoint.action_endpoint:
        # Reload the agent so the new action endpoint takes effect.
        background_tasks.add_task(AgentProcessor.reload, current_user.get_bot())
    return {"message": "Endpoint saved successfully!"}
async def process_upload(
    task: BackgroundTasks,
    x_neo4j_user: str = Header("neo4j"),
    x_neo4j_pass: str = Header("password"),
    upload: UploadFile = File(...),
):
    """Accept a file upload and hand its contents to airohandler.process.

    Neo4j credentials are taken from custom request headers (with defaults).
    """
    # Read the whole upload into memory; the handler receives raw bytes.
    contents = await upload.read()
    task.add_task(
        airohandler.process,
        contents,
        upload.filename,
        x_neo4j_user,
        x_neo4j_pass,
    )
    return {"status": "Upload Success"}
async def create_stock(
    stock_request: StockRequest,
    background_tasks: BackgroundTasks,
    db: Session = Depends(get_db),
):
    """Create a stock row and schedule a background fetch of its market data."""
    new_stock = Stock(symbol=stock_request.symbol)
    db.add(new_stock)
    db.commit()
    # The row must be committed first so the task can look it up by id.
    background_tasks.add_task(fetch_stock_data, new_stock.id)
    return {"code": "success", "message": "stock created"}
async def send_in_background(background_tasks: BackgroundTasks, email: EmailSchema) -> JSONResponse:
    """Queue a simple plain-text email and acknowledge immediately."""
    recipients = email.dict().get("email")
    message = MessageSchema(
        subject="Fastapi mail module",
        recipients=recipients,
        body="Simple background task ",
    )
    mailer = FastMail(conf)
    # The SMTP round-trip happens after the response is returned.
    background_tasks.add_task(mailer.send_message, message)
    return JSONResponse(status_code=200, content={"message": "email has been sent"})
async def register(tasks: BackgroundTasks, req: Request, resp: Response,
                   form: RegisterForm):
    """Register a new member from a validated email token and sign them in.

    Raises InvalidValidateTokenError for a bad token and EMailUsedError when
    the address already belongs to a member.
    """
    email = await credential_ctl.get_email_validate_token(
        'register', form.validate_token)
    if not email:
        raise auth_error.InvalidValidateTokenError()
    # Reject addresses that already belong to an existing member.
    if await member_ctl.get_member(email=email):
        raise auth_error.EMailUsedError()
    member = await member_ctl.create_member(
        name=form.name, email=email, password=form.password)
    await auth_web_ctl.create_credential(req, resp, member.id)
    # Bind the member's character via the worker after the response.
    tasks.add_task(worker.external.bind_member_character, member.id)
    return BasicResponse()
def handle_dialog_action(action: dict, background_tasks: BackgroundTasks, db_session: SessionLocal):
    """Dispatch a Slack dialog submission to its registered handler functions."""
    user = action["user"]
    conversation = get_by_channel_id(
        db_session=db_session, channel_id=action["channel"]["id"])
    incident_id = conversation.incident_id
    # Every handler registered for this callback runs after the response.
    for handler in dialog_action_functions(action["callback_id"]):
        background_tasks.add_task(handler, user["id"], user["email"], incident_id, action)
async def add_crypto_symbol(
    background_tasks: BackgroundTasks,
    response: Response,
    symbol: str = Query(..., title="Symbol of the cryptocurrency", description="Symbol to be added to the database.")):
    """Add a crypto symbol to the database if its profile exists upstream.

    Returns the matched profile, or a 404 error body when no profile matches.
    """
    # Symbols look like EXCHANGE:PAIR; profiles are listed per exchange.
    exchange = symbol.split(":")[0]
    profiles = await fh.get_crypto_symbols(exchange)
    match = next((p for p in profiles if p["symbol"] == symbol), None)
    if match is not None:
        background_tasks.add_task(add_crypto_tasks, symbol=symbol, profile=match)
        return match
    response.status_code = status.HTTP_404_NOT_FOUND
    return {"error": "Profile not found"}
def join_incident(
    *,
    db_session: Session = Depends(get_db),
    current_incident: Incident = Depends(get_current_incident),
    current_user: DispatchUser = Depends(get_current_user),
    background_tasks: BackgroundTasks,
):
    """Join an individual incident.

    Schedules the add-or-reactivate participant flow for the current user;
    the endpoint itself returns no body.
    """
    # NOTE(review): db_session is injected but unused in this handler.
    background_tasks.add_task(
        incident_add_or_reactivate_participant_flow,
        current_user.email,
        incident_id=current_incident.id,
    )
async def generate(
    background_tasks: BackgroundTasks,
    settings: Settings = Depends(get_settings),
    results: UploadFile = File(...),
    repo: ReportRepo = Depends(get_report_repo),
):
    """Store uploaded results, build a report, and return it as a file."""
    await repo.save_results(results)
    report_path: Path = await repo.make_report()
    maker = settings.stats_handler_maker
    if maker is not None:
        logger.info("Added stats handler for response")
        # The maker turns current stats into a callable run after the response.
        stats = await repo.get_stats()
        background_tasks.add_task(maker(stats))
    return FileResponse(str(report_path))
def resend_verification_email(
    background_tasks: BackgroundTasks,
    user: User = Depends(dependencies.get_current_user),
    session: Session = Depends(dependencies.get_database_session),
):
    """Re-send the account verification email to the current user.

    Raises:
        HTTPException: 400 if the user's email is already verified.

    Fixed: the user-facing subject line contained a typo ("Verify Acccount").
    """
    if user.email_verified:
        raise HTTPException(400, "User email already verified")
    background_tasks.add_task(
        util.send_email,
        to=user.email,
        subject="Verify Account",
        message=generate_email_verification_text(user),
        html=generate_email_verification_html(user),
    )
    return {"status": "Success", "message": "Email sent successfully"}