async def get_health_ready():
    """Readiness probe.

    Always answers with an empty ``204 No Content`` so orchestrators can
    tell the service is up and ready to accept traffic.
    """
    empty_reply = Response(status_code=status.HTTP_204_NO_CONTENT)
    return empty_reply
async def verify_fb_token(token_sent, request: Request):
    """Facebook webhook verification handshake.

    When the token Facebook sent matches our configured ``VERIFY_TOKEN``,
    echo back the ``hub.challenge`` query parameter to complete the
    subscription; otherwise answer with an error string.
    """
    if token_sent != VERIFY_TOKEN:
        return 'Invalid verification token'
    challenge = request.query_params["hub.challenge"]
    return Response(content=challenge)
def delte_patient(pk: int, is_logged: bool = Depends(is_logged)):
    """Delete the patient stored under ``pk`` and reply 204.

    NOTE(review): the endpoint name keeps the original spelling ("delte")
    because routes/callers depend on it.  A missing ``pk`` is a no-op.
    """
    # pop with a default removes the entry when present, exactly like the
    # `if pk in …: del …` pattern, without a second lookup.
    app.storage.pop(pk, None)
    return Response(status_code=status.HTTP_204_NO_CONTENT)
async def get_comp_section(name: str, response: Response):
    """Look up a section by name via ``get_section``.

    On success return the content wrapped in a result envelope; on an
    unknown name (``ValueError``) downgrade the status to 404 and report
    the failure in the payload instead of raising.
    """
    try:
        content = get_section(name)
    except ValueError:
        response.status_code = status.HTTP_404_NOT_FOUND
        return {'result': 'Section not found.'}
    return {'result': 'Success.', 'content': content}
def create_cookie(response: Response):
    """Attach a demo session cookie to the outgoing response."""
    cookie_name = "fakesession"
    cookie_value = "fake-cookie-session-value"
    response.set_cookie(key=cookie_name, value=cookie_value)
    return {"message": "Come to the dark side, we have cookies"}
async def create_link(entityLeft: Entity, entityRight: Entity, linkArgs: LinkArgs, response: Response):
    """Ensure both entities exist in the objects service, then link them.

    Flow: validate the link type for the two entity systems/types, look up
    (or create) each entity by its ``registeredId``, then create the link
    between the two UUIDs unless one already exists.  Warnings are logged
    for suspicious message orderings (presumably a Kafka consumer feeds
    this endpoint — TODO confirm with the producer side).
    """
    try:
        validate_link(entityLeft.system, entityLeft.type, entityRight.system, entityRight.type)
    except Exception as inst:
        # Invalid system/type combination: answer 410 with an empty body
        # (returning the injected Response object yields no payload).
        response.status_code = 410
        return response
    leftEntityCreated = False
    rightEntityCreated = False
    linkCreated = False
    # Fetch-or-create the left entity by its registered id.
    left = req.get(url=API_ENDPOINT_OBJECTS + "?registeredId=" + entityLeft.registeredId)
    left_data = left.json()
    if not left_data:
        left_resp = req.post(url=API_ENDPOINT_OBJECTS, data=entityLeft.dict()).json()
        left_uuid = left_resp["uuid"]
        leftEntityCreated = True
    else:
        left_uuid = left_data[0]["uuid"]
    # Fetch-or-create the right entity by its registered id.
    right = req.get(url=API_ENDPOINT_OBJECTS + "?registeredId=" + entityRight.registeredId)
    right_data = right.json()
    if not right_data:
        right_resp = req.post(url=API_ENDPOINT_OBJECTS, data=entityRight.dict()).json()
        right_uuid = right_resp["uuid"]
        rightEntityCreated = True
    else:
        right_uuid = right_data[0]["uuid"]
    # Create the link only when no link between the two UUIDs exists yet.
    l = req.get(url=API_ENDPOINT_LINKS + "link" + "?left=" + left_uuid + "&right=" + right_uuid)
    if not l.text:
        link = Link(left=left_uuid, right=right_uuid, type=linkArgs.type, oriented=linkArgs.oriented)
        l_uuid = req.post(url=API_ENDPOINT_LINKS, data=link.dict()).json()
        linkCreated = True
    # NOTE(review): this fires when the left entity WAS just created, yet the
    # message says "not yet created" — condition or message looks inverted;
    # confirm the intended semantics before changing either.
    if (leftEntityCreated):
        logging.warning("WARNING: Left entity is not yet created.\n" + "left entity id:" + entityLeft.registeredId + "\n" + "right entity id:" + entityRight.registeredId + "\n" + "link type:" + linkArgs.type + "\n")
    # Nothing was created at all: the same link message probably arrived twice.
    if (not leftEntityCreated and not rightEntityCreated and not linkCreated):
        logging.warning("WARNING: Possible duplicated message.\n" + "left entity id:" + entityLeft.registeredId + "\n" + "right entity id:" + entityRight.registeredId + "\n" + "link type:" + linkArgs.type + "\n")
    # Only one side pre-existed: entity messages may have been reordered upstream.
    if (leftEntityCreated and not rightEntityCreated):
        logging.warning(
            "WARNING: One message can came before the other, kafka or producer issue.\n" + "left entity id:" + entityLeft.registeredId + "\n" + "right entity id:" + entityRight.registeredId +
            "\n" + "link type:" + linkArgs.type + "\n")
    return
async def root(request: Request, response: Response, api_key_extraction=Depends(load_user_from_auth)):
    """Commit a payload's hash to the contract and store the payload.

    The request body must be JSON with a ``payload`` string.  Depending on
    ``settings.file_upload_method`` the payload is uploaded either to Sia
    (keyed by its SHA-256) or to Filecoin via Powergate (hot-staged to
    IPFS, then pushed to cold storage).  In both cases the keccak hashes
    of payload and API token are committed on-chain and an accounting row
    is recorded.  Returns the commit tx hash and a locally generated
    record id.
    """
    # Reject requests without a resolvable API key / token.
    if not api_key_extraction:
        response.status_code = status.HTTP_403_FORBIDDEN
        return {'error': 'Forbidden'}
    if not api_key_extraction['token']:
        response.status_code = status.HTTP_403_FORBIDDEN
        return {'error': 'Forbidden'}
    req_args = await request.json()
    payload = req_args['payload']
    token = api_key_extraction['token']
    if settings.file_upload_method == "sia":
        # Sia path: content-address the payload by its SHA-256 hex digest.
        h = hashlib.sha256()
        h.update(payload.encode())
        sha_payload_hash = h.hexdigest()
        upload_to_sia(sha_payload_hash, payload)
        # Keccak hashes (0x-prefixed hex) are what the contract records.
        payload_hash = '0x' + keccak(text=payload).hex()
        token_hash = '0x' + keccak(text=token).hex()
        tx_hash_obj = contract.commitRecordHash(
            **dict(payloadHash=payload_hash, apiKeyHash=token_hash))
        tx_hash = tx_hash_obj[0]['txHash']
        rest_logger.debug('Committed record append to contract..')
        rest_logger.debug(tx_hash_obj)
        local_id = str(uuid4())
        timestamp = int(time.time())
        rest_logger.debug("Adding row to accounting_records_table")
        # confirmed == -1 here vs 0 on the filecoin path — presumably a
        # distinct "no pinning pipeline" marker; confirm against consumers.
        accounting_records_table.add_row({
            'token': token,
            'cid': sha_payload_hash,
            'localCID': local_id,
            'txHash': tx_hash,
            'timestamp': timestamp,
            'confirmed': -1,
        })
        return {'commitTx': tx_hash, 'recordCid': local_id}
    elif settings.file_upload_method == "filecoin":
        pow_client = PowerGateClient(settings.powergate_url, False)
        # if request.method == 'POST':
        payload_bytes = BytesIO(payload.encode('utf-8'))
        payload_iter = bytes_to_chunks(payload_bytes)
        # adds to hot tier, IPFS
        stage_res = pow_client.ffs.stage(payload_iter, token=token)
        rest_logger.debug('Staging level results:')
        rest_logger.debug(stage_res)
        # uploads to filecoin
        push_res = pow_client.ffs.push(stage_res.cid, token=token)
        rest_logger.debug('Cold tier finalization results:')
        rest_logger.debug(push_res)
        # Notify the deal-watcher about the new cid/job so it can track
        # confirmation asynchronously.
        await request.app.redis_pool.publish_json('new_deals', {
            'cid': stage_res.cid,
            'jid': push_res.job_id,
            'token': token
        })
        payload_hash = '0x' + keccak(text=payload).hex()
        token_hash = '0x' + keccak(text=token).hex()
        tx_hash_obj = contract.commitRecordHash(
            **dict(payloadHash=payload_hash, apiKeyHash=token_hash))
        tx_hash = tx_hash_obj[0]['txHash']
        rest_logger.debug('Committed record append to contract..')
        rest_logger.debug(tx_hash_obj)
        local_id = str(uuid4())
        timestamp = int(time.time())
        rest_logger.debug("Adding row to accounting_records_table")
        # Add row to skydb
        print(f"Adding cid: {stage_res.cid}")
        accounting_records_table.add_row({
            'token': token,
            'cid': stage_res.cid,
            'localCID': local_id,
            'txHash': tx_hash,
            'confirmed': 0,
            'timestamp': timestamp
        })
        return {'commitTx': tx_hash, 'recordCid': local_id}
async def check_is_ready(request: Request, call_next: Callable) -> Response:
    """Middleware gate: answer 503 for every request until startup finished."""
    if state.ready:
        return await call_next(request)
    return Response(content="starting up...", status_code=503)
def signin(self, db: Session, request: Request, response: Response, *, obj_in: schema_user.UserPasswordSigninIn): """ 通过密码登录 :param account: 手机号/邮箱/用户名 :param password: 密码 :return: token, 提示信息 """ # -------- 判断输入账号 手机号/邮箱/用户名 ------- # if re.match(RE_PHONE, obj_in.account): # 手机 db_user = crud_user.get_user_by_phone(db, phone=obj_in.account) elif re.match(RE_EMAIL, obj_in.account): # 邮箱 db_user = crud_user.get_user_by_email(db, email=obj_in.account) else: # 用户名 db_user = crud_user.get_user_by_username(db, username=obj_in.account) # --------- 判断账号是否注册过 ---------- # if not db_user: message = f"账号 {obj_in.account} 未注册" response.status_code = status.HTTP_404_NOT_FOUND elif db_user.status == 1: message = f"账号 {obj_in.account} 已被禁用" response.status_code = status.HTTP_403_FORBIDDEN else: # ------ 判断用户是否设置初始密码 ------ # if not db_user.hashed_password: message = f"账号 {obj_in.account} 还未设置初始密码" response.status_code = status.HTTP_401_UNAUTHORIZED return None, message # --------- 密码进行比对 ----------- # if not security.verify_password(obj_in.password, db_user.hashed_password): message = f"用户 {obj_in.account} 密码不正确" response.status_code = status.HTTP_401_UNAUTHORIZED logger.error(message) else: # -------- # 登录token 存储了userid 和 authorityid ------- # access_token_expires = timedelta( minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES) token = security.create_access_token( db_user.id, db_user.authorityid, expires_delta=access_token_expires) redis_client.set(db_user.id, token, ex=settings.ACCESS_TOKEN_EXPIRE_MINUTES) # -------- 登录成功之后向数据库添加一条登录日志信息 -------- # ip = request.client.host # 用户ip db_signin_log = crud_user.add_signin_log(db, userid=db_user.id, ip=ip) if not db_signin_log: message = f"用户 {obj_in.account} 登录失败" response.status_code = status.HTTP_401_UNAUTHORIZED else: message = f"用户 {obj_in.account} 登录成功" response.status_code = status.HTTP_200_OK return token, message logger.error(message) return None, message
async def index(session: AsyncSession = Depends(get_user_session)) -> Response:
    """Write a marker key into the user session, persist it, and reply 200."""
    await session.set("test", "passed")
    await session.save()
    ok_response = Response(status_code=status.HTTP_200_OK)
    return ok_response
async def index(session: AsyncSession = Depends(get_user_session)) -> Response:
    """Resolve the session dependency and reply with an empty 200."""
    ok_response = Response(status_code=status.HTTP_200_OK)
    return ok_response
def respond(stocks):
    """Build a 200 JSON response wrapping *stocks* as ``{"stock_list": ...}``.

    BUGFIX: the original constructed a bare ``Response()`` and assigned
    ``.body`` afterwards; Starlette computes Content-Length (and other
    headers) at construction time, so the header no longer matched the
    payload.  Passing the content through the constructor keeps headers
    consistent, and the media type is declared explicitly since the body
    is JSON.

    :param stocks: JSON-serializable list of stocks
    :return: ``Response`` with the encoded payload and status 200
    """
    payload = json.dumps({"stock_list": stocks}).encode("utf-8")
    return Response(
        content=payload,
        status_code=status.HTTP_200_OK,
        media_type="application/json",
    )
def respond_with_error(message: str = "Erro ao buscar lista de ações"):
    """Build a 500 JSON error response carrying *message*.

    BUGFIX: the original assigned ``.body`` after ``Response()`` was
    constructed, leaving the Content-Length header (computed at init time)
    inconsistent with the payload.  The content is now passed through the
    constructor and the JSON media type is declared explicitly.

    :param message: human-readable error description (Portuguese default kept)
    :return: ``Response`` with ``{"message": ...}`` and status 500
    """
    return Response(
        content=json.dumps(dict(message=message)).encode("utf-8"),
        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        media_type="application/json",
    )
async def user_logout(response: Response):
    """Log the user out by expiring the ``token`` cookie.

    NOTE(review): the original author tried redirecting to "/" here but
    reported the cookie then failed to delete, so the handler returns a
    bare truthy value instead.
    """
    response.delete_cookie("token")
    return 1
async def agent_data(
    uuid: UUID,
    *,
    certificate: str = Header(...),
    compression: str = Header(...),
    monitoring_data: UploadFile = File(...),
) -> Response:
    """Receive pushed monitoring data from a registered push agent.

    Guards, in order: the host must be registered, must be a push host,
    the compression header must name a supported algorithm, and the upload
    must decompress cleanly.  The decompressed data is stored atomically
    in the host's source directory and a ready-marker file is moved into
    place.  Replies 204 on success; 403/400 ``HTTPException`` otherwise.
    """
    host = Host(uuid)
    if not host.registered:
        logger.error(
            "uuid=%s Host is not registered",
            uuid,
        )
        raise HTTPException(
            status_code=HTTP_403_FORBIDDEN,
            detail="Host is not registered",
        )
    if host.host_type is not HostTypeEnum.PUSH:
        logger.error(
            "uuid=%s Host is not a push host",
            uuid,
        )
        raise HTTPException(
            status_code=HTTP_403_FORBIDDEN,
            detail="Host is not a push host",
        )
    try:
        decompressor = Decompressor(compression)
    except ValueError:
        logger.error(
            "uuid=%s Unsupported compression algorithm: %s",
            uuid,
            compression,
        )
        raise HTTPException(
            status_code=400,
            detail=f"Unsupported compression algorithm: {compression}",
        )
    try:
        decompressed_agent_data = decompressor(monitoring_data.file.read())
    except DecompressionError as e:
        logger.error(
            "uuid=%s Decompression of agent data failed: %s",
            uuid,
            e,
        )
        raise HTTPException(
            status_code=400,
            detail="Decompression of agent data failed",
        ) from e
    try:
        _store_agent_data(
            host.source_path,
            decompressed_agent_data,
        )
    except FileNotFoundError:
        # We only end up here in case someone re-configures the host at exactly the same time when
        # data is being pushed. To avoid internal server errors, we still handle this case.
        logger.error(
            "uuid=%s Host is not registered or not configured as push host.",
            uuid,
        )
        raise HTTPException(
            status_code=403,
            detail="Host is not registered or not configured as push host",
        )
    _move_ready_file(uuid)
    logger.info(
        "uuid=%s Agent data saved",
        uuid,
    )
    return Response(status_code=HTTP_204_NO_CONTENT)
def signin_by_verify_code(self, db: Session, request: Request, response: Response, *, obj_in: schema_user.UserVerifyCodeSigninIn):
    """Sign in with an SMS / e-mail verification code.

    The code is checked against the one cached in redis under the account.
    Unknown phone/e-mail accounts are auto-registered on first login.

    :param obj_in: account (phone/e-mail) and verification code
    :return: ``(token, message)`` on success, ``(None, message)`` otherwise;
        ``response.status_code`` is set to match the outcome
    """
    # --------- Compare the submitted code with the one cached in redis ----------- #
    redis_verify_code = redis_client.get(obj_in.account)
    if obj_in.verify_code != redis_verify_code:
        message = "验证码不正确或已过期"
        response.status_code = status.HTTP_422_UNPROCESSABLE_ENTITY
        logger.error(message)
        return None, message
    # --------- Resolve the account kind: phone / e-mail --------- #
    if re.match(RE_PHONE, obj_in.account):  # phone number
        db_user = crud_user.get_user_by_phone(db, phone=obj_in.account)
    elif re.match(RE_EMAIL, obj_in.account):  # e-mail
        db_user = crud_user.get_user_by_email(db, email=obj_in.account)
    else:
        message = "账号输入有误,请重新输入"
        response.status_code = status.HTTP_422_UNPROCESSABLE_ENTITY
        logger.error(message)
        return None, message
    # ----------- Auto-register accounts seen for the first time ----------- #
    userid = None
    authorityid = None
    if not db_user:
        if re.match(RE_PHONE, obj_in.account):  # phone number
            dict_obj_in = {
                "phone": obj_in.account,
                "hashed_password": None,
                "ip": request.client.host
            }
            db_user_obj = crud_user.create_by_phone(db, obj_in=dict_obj_in)
        elif re.match(RE_EMAIL, obj_in.account):  # e-mail
            dict_obj_in = {
                "email": obj_in.account,
                "hashed_password": None,
                "ip": request.client.host
            }
            db_user_obj = crud_user.create_by_email(db, obj_in=dict_obj_in)
        if not db_user_obj:
            message = "登录失败"
            response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
            logger.error(message)
            return None, message
        userid = db_user_obj.id  # id of the freshly created user
        authorityid = db_user_obj.authorityid  # default authority
    elif db_user.status == 1:
        message = f"账号 {obj_in.account} 已被禁用"
        response.status_code = status.HTTP_403_FORBIDDEN
        logger.error(message)
        return None, message
    if db_user:
        userid = db_user.id
        authorityid = db_user.authorityid
    # -------- Issue the login token (embeds userid and authorityid) ------- #
    access_token_expires = timedelta(
        minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    token = security.create_access_token(
        userid, authorityid,
        expires_delta=access_token_expires)
    # NOTE(review): redis `ex` is seconds but reuses the minutes setting —
    # looks like a unit mismatch; confirm.
    redis_client.set(userid, token, ex=settings.ACCESS_TOKEN_EXPIRE_MINUTES)
    # --------- Record a sign-in log row after a successful login ---------- #
    ip = request.client.host  # client IP
    signin_log_id = crud_user.add_signin_log(db, userid=userid, ip=ip)
    if not signin_log_id:
        message = f"用户 {obj_in.account} 登录失败"
        response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
        logger.error(message)
        return None, message
    else:
        redis_client.set(obj_in.account, "", ex=1)  # invalidate the verification code
        message = f"用户 {obj_in.account} 登录成功"
        response.status_code = status.HTTP_200_OK
        return token, message
async def index():
    """Serve a tiny HTML landing page that fades in a welcome banner via jQuery."""
    welcome_text = 'Welcome to Intellect Parse API'
    page = f'''<html><script src="https://code.jquery.com/jquery-3.1.1.min.js"></script><p id="welcome" style="user-select:none;display:none;font-weight:400;font-size:50px;position:absolute;left:50%;top:40%;transform:translate(-50%,-50%);">{welcome_text}</p><script language="javascript" type="text/javascript">$("#welcome").show("slow");</script></html>'''
    return Response(page)
def signup(self, db: Session, request: Request, response: Response, *, obj_in: schema_user.UserSignupIn): """ 通过 手机号/邮箱 注册 :param account: 手机号/邮箱 :param verify_code: 验证码 :param password: 密码 :return: 用户信息, 提示信息 """ # --------- 校验验证码 --------- # redis_verify_code = redis_client.get(obj_in.account) if obj_in.verify_code != redis_verify_code: message = "验证码不正确或已过期" response.status_code = status.HTTP_422_UNPROCESSABLE_ENTITY return None, message # --------- 先查询一次,在进行注册 -------- # if re.match(RE_PHONE, obj_in.account): # 手机号 db_user = crud_user.get_user_by_phone(db, phone=obj_in.account) if db_user: message = f"账号 {obj_in.account} 已被注册" response.status_code = status.HTTP_400_BAD_REQUEST return None, message else: dict_obj_in = { "phone": obj_in.account, "hashed_password": security.get_password_hash(obj_in.password), "ip": request.client.host } db_create_user = crud_user.create_by_phone( db, obj_in=dict_obj_in) # 通过手机号进行注册 elif re.match(RE_EMAIL, obj_in.account): # 邮箱 db_user = crud_user.get_user_by_email(db, email=obj_in.account) if db_user: message = f"账号 {obj_in.account} 已被注册" response.status_code = status.HTTP_400_BAD_REQUEST return None, message else: dict_obj_in = { "email": obj_in.account, "hashed_password": security.get_password_hash(obj_in.password), "ip": request.client.host } db_create_user = crud_user.create_by_email( db, obj_in=dict_obj_in) # 通过邮箱进行注册 else: message = "账号输入有误" # 用户名 response.status_code = status.HTTP_422_UNPROCESSABLE_ENTITY logger.error(message) return None, message if not db_create_user: message = f"用户 {obj_in.account} 注册失败" response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR logger.error(message) else: message = f"用户 {obj_in.account} 注册成功" response.status_code = status.HTTP_201_CREATED redis_client.set(obj_in.account, "", ex=1) # 注册成功之后使验证码立即失效 return db_create_user, message
async def all_payloads(request: Request, response: Response, api_key_extraction=Depends(load_user_from_auth), start_index: Optional[int] = -1, retrieval: Optional[str] = None):
    """List this API key's stored payload records.

    For the "sia" upload method, fetches up to 2 accounting rows starting
    at *start_index* (default: the latest) and downloads each payload.
    For "filecoin", optionally queues/reports a bulk retrieval request
    (``retrieval`` query parameter: 'true'/'false') and returns up to 3 of
    the newest rows with their pin status.

    BUGFIX: ``retrieval`` was referenced in the filecoin branch without
    ever being defined (a guaranteed ``NameError``); it is now an optional
    query parameter defaulting to ``None``, which preserves the intended
    "no retrieval" behavior and keeps the signature backward compatible.

    :param start_index: row index to start from; -1 means "latest row"
    :param retrieval: 'true' to request/inspect a bulk retrieval (filecoin only)
    :return: dict with a 'payloads' list (plus retrieval status on filecoin),
        or an error dict with a 403 status
    """
    rest_logger.debug('Api key extraction')
    rest_logger.debug(api_key_extraction)
    rest_logger.debug(start_index)
    # Reject requests without a resolvable API key / token.
    if not api_key_extraction:
        response.status_code = status.HTTP_403_FORBIDDEN
        return {'error': 'Forbidden'}
    if not api_key_extraction['token']:
        response.status_code = status.HTTP_403_FORBIDDEN
        return {'error': 'Forbidden'}
    if start_index == -1:
        start_index = accounting_records_table.index - 1
    if settings.file_upload_method == "sia":
        ffs_token = api_key_extraction['token']
        return_json = dict()
        payload_list = list()
        records_rows = None
        # Redis-counter spin lock around the table fetch.
        while True:
            rest_logger.debug("Waiting for Lock")
            v = redis_lock.incr('my_lock')
            if v == 1:
                rest_logger.debug("Fetching data...")
                records_rows = accounting_records_table.fetch(
                    condition={'token': ffs_token},
                    start_index=start_index,
                    n_rows=2)
                v = redis_lock.decr('my_lock')
                break
            v = redis_lock.decr('my_lock')
            time.sleep(0.01)
        print(records_rows)
        for row_index in records_rows:
            rest_logger.debug(records_rows[row_index])
            # Download the payload from Sia by its content hash.
            file_, payload = sia_get(records_rows[row_index]['cid'])
            payload_obj = {
                'index': row_index,
                'recordCid': records_rows[row_index]['localCID'],
                'txHash': records_rows[row_index]['txHash'],
                'timestamp': records_rows[row_index]['timestamp'],
                'file_download': file_,
            }
            payload_list.append(payload_obj)
        return_json.update({'payloads': payload_list})
        return return_json
    elif settings.file_upload_method == "filecoin":
        # Only the literal string 'true' enables retrieval mode; None,
        # 'false', and anything else leave it off (original semantics).
        retrieval_mode = retrieval == 'true'
        ffs_token = api_key_extraction['token']
        return_json = dict()
        if retrieval_mode:
            row = None
            # Spin lock around the bulk-retrievals table fetch.
            while True:
                rest_logger.debug("Waiting for Lock")
                v = redis_lock.incr('my_lock')
                if v == 1:
                    row = retreivals_bulk_table.fetch(
                        condition={'token': ffs_token},
                        start_index=retreivals_bulk_table.index - 1,
                        n_rows=1)
                    v = redis_lock.decr('my_lock')
                    break
                v = redis_lock.decr('my_lock')
                time.sleep(0.1)
            if len(row) >= 1:
                row = row[next(iter(row.keys()))]
            if not row:
                # No pending request for this token: queue a new one.
                request_id = str(uuid4())
                request_status = 'Queued'
                retreivals_bulk_table.add_row({
                    'requestID': request_id,
                    'api_key': api_key_extraction['api_key'],
                    'token': ffs_token,
                    'retreived_file': "",
                    'completed': 0
                })
            else:
                request_id = row['requestID']
                request_status = 'InProcess' if int(
                    row['completed']) == 0 else 'Completed'
            return_json.update({
                'requestId': request_id,
                'requestStatus': request_status
            })
        payload_list = list()
        records_rows = None
        # Spin lock around the accounting-records fetch.
        while True:
            rest_logger.debug("Waiting for Lock")
            v = redis_lock.incr('my_lock')
            if v == 1:
                records_rows = accounting_records_table.fetch(
                    condition={'token': ffs_token},
                    start_index=accounting_records_table.index - 1,
                    n_rows=3)
                v = redis_lock.decr('my_lock')
                break
            v = redis_lock.decr('my_lock')
            time.sleep(0.1)
        rest_logger.debug(records_rows)
        for row_index in records_rows:
            payload_obj = {
                'index': row_index,
                'recordCid': records_rows[row_index]['localCID'],
                'txHash': records_rows[row_index]['txHash'],
                'timestamp': records_rows[row_index]['timestamp']
            }
            # Map the stored confirmation flag to a human-readable pin status.
            confirmed = int(records_rows[row_index]['confirmed'])
            if confirmed == 0:
                # response.status_code = status.HTTP_404_NOT_FOUND
                payload_status = 'PendingPinning'
            elif confirmed == 1:
                payload_status = 'Pinned'
            elif confirmed == 2:
                payload_status = 'PinFailed'
            else:
                payload_status = 'unknown'
            payload_obj['status'] = payload_status
            payload_list.append(payload_obj)
        return_json.update({'payloads': payload_list})
        return return_json
def login(response: Response, session_token=Depends(get_current_username)):
    """Finish the login: set the session cookie and redirect to /welcome."""
    redirect = RedirectResponse(url="/welcome")
    redirect.set_cookie(key="session_token", value=session_token)
    return redirect
async def download_request(response: Response, background_tasks: BackgroundTasks, url, token=None, format=None, subtitles=None, location=None, filename=None, presets=None):
    """Queue one or more youtube-dl downloads for *url*.

    All query parameters arrive URL-encoded and are unquoted here.  When
    user management is enabled, *token* must map to a known user or the
    request is rejected with 401.  Option sets are generated per preset,
    pre-validated, and the downloads are launched as a background task;
    the status code reflects how much of the request could be verified
    up front (200 fully checked, 202 partially, 400 all invalid).
    """
    decoded_url = unquote(url)
    decoded_presets = []  # from string to list
    selected_presets_objects = [
    ]  # store presets objects required by the presets field
    if presets is not None:
        decoded_presets = presets.split(',')
        selected_presets_objects = ydl_utils.existing_presets(
            decoded_presets)  # transform string in object
    user = None
    if params.enable_users_management and token is not None:
        user = ydl_utils.find_associated_user(unquote(token))
    # With user management on, an unknown/missing token ends the request here.
    if params.enable_users_management and user is None:
        logging.warning(
            f'An unauthorized user tried to download {decoded_url}')
        response.status_code = 401  # unauthorized
        return {'status_code': response.status_code}
    query_parameters = {  # parameters object build form url query parameters
        'format': unquote(format) if format is not None else None,
        'subtitles': unquote(subtitles) if subtitles is not None else None,
        'location': unquote(location) if location is not None else None,
        'filename': unquote(filename) if filename is not None else None,
        'presets': unquote(presets) if presets is not None else None
    }
    # override location setting of the preset for the current user
    if params.enable_users_management and user.get(
            'force_location') is not None:
        query_parameters['location'] = user.get('force_location')
    # generate all options sets for all download
    downloads_options_sets = ydl_utils.generate_ydl_options_sets(
        decoded_url, selected_presets_objects, query_parameters, user)
    # count the number of check downloads and the number of errors
    validity_check = ydl_utils.recap_all_downloads_validity(
        downloads_options_sets)
    # if all downloads were checked and without errors, we can ensure the file will be correctly downloaded
    if validity_check.get('checked') == validity_check.get(
            'total') and validity_check.get('errors') == 0:
        background_tasks.add_task(ydl_utils.launch_downloads,
                                  decoded_url, downloads_options_sets)
        response.status_code = 200  # request ok
    # if not all downloads were checked, we can't ensure all files will be correctly downloaded
    elif validity_check.get('checked') != validity_check.get('total'):
        background_tasks.add_task(ydl_utils.launch_downloads, decoded_url,
                                  downloads_options_sets)
        response.status_code = 202  # request ok but result not granted
    # if all downloads are in error, we can ensure no file will be downloaded
    else:
        logging.error(f'Impossible to download \'{decoded_url}\'')
        response.status_code = 400  # bad request
    return {
        'status_code': response.status_code,
        'url': decoded_url,
        # presets requested minus presets actually found = unknown presets
        'presets_errors': (len(decoded_presets) - len(selected_presets_objects)),
        'list': downloads_options_sets,
        'youtube-dl_version': youtube_dl.version.__version__
    }
def login(user: str, password: str, response: Response):
    """Derive a session token from the credentials and app secret, register it, and set the cookie."""
    token_material = f"{user}{password}{app.secret_key}".encode()
    session_token = sha256(token_material).hexdigest()
    app.access_tokens.append(session_token)
    response.set_cookie(key="session_token", value=session_token)
    return {"message": "Welcome"}
def get_headers(response: Response):
    """Demonstrate adding a custom header to the outgoing response."""
    header_name, header_value = "X-Cat-Dog", "alone in the world"
    response.headers[header_name] = header_value
    return {"message": "Hello World"}
def createEmployeeKey(emp: Employee, key: Optional[str] = Header(None)):
    """Create an employee record received together with an optional key header.

    :param emp: employee payload to persist via ``empService``
    :param key: optional API key taken from the request headers
    :return: plain-text 200 response confirming creation

    BUGFIX: the response declared ``media_type="plain/text"``, which is not
    a valid MIME type; the registered plain-text type is ``text/plain``
    (RFC 2046), so clients honoring Content-Type now parse it correctly.
    """
    print("Received Key as Header: ", key)
    empService.createEmployee(emp)
    resData = "Employee key accepted and created successfully"
    return Response(content=resData, status_code=200, media_type="text/plain")
def make_data_response(data: pd.DataFrame):
    """Serialize *data* as gzip-compressed parquet and wrap it in a binary response."""
    data = compatibility.with_index_names(data, queryset.loa)
    parquet_buffer = io.BytesIO()
    data.to_parquet(parquet_buffer, compression="gzip")
    return Response(parquet_buffer.getvalue(), media_type="application/octet-stream")
async def twitter_delete(twitter=Depends(get_twitter)):
    """Stop the spawner task for this twitter resource, delete it, and reply 204."""
    spawner.stop(twitter)
    await twitter.delete()
    no_content = Response(status_code=status.HTTP_204_NO_CONTENT)
    return no_content
def show_patient(pk: int, is_logged: bool = Depends(is_logged)):
    """Return the stored patient for ``pk``; reply with an empty 204 when unknown.

    NOTE(review): 204 for a missing record is unusual (404 would be more
    conventional) — preserved as-is since clients may depend on it.
    """
    try:
        return app.storage[pk]
    except KeyError:
        return Response(status_code=status.HTTP_204_NO_CONTENT)
# NOTE(review): the statements below are the tail of a registration endpoint
# whose `def` line lies before this chunk; indentation reconstructed to match
# a function body.  Only `_store_agent_data` is complete in this view.
    # Registration with agent labels is an edition-gated feature.
    if not (edition := cmk_edition(credentials)).supports_registration_with_labels():
        logger.error(
            "uuid=%s Registration with labels not supported",
            registration_body.uuid,
        )
        raise HTTPException(
            status_code=HTTP_501_NOT_IMPLEMENTED,
            detail=
            f"The Checkmk {edition.value} edition does not support registration with agent labels",
        )
    _write_registration_file(
        credentials.username,
        registration_body,
    )
    return Response(status_code=HTTP_204_NO_CONTENT)


def _store_agent_data(
    target_dir: Path,
    decompressed_data: bytes,
) -> None:
    """Atomically write *decompressed_data* to ``target_dir / "agent_output"``.

    The data is written to a named temporary file in the same directory and
    renamed into place, so readers never observe a partially written file;
    the temporary file is unlinked afterwards in all cases (the rename on
    the success path makes the unlink a no-op).
    """
    with tempfile.NamedTemporaryFile(
        dir=target_dir,
        delete=False,
    ) as temp_file:
        try:
            temp_file.write(decompressed_data)
            os.rename(temp_file.name, target_dir / "agent_output")
        finally:
            Path(temp_file.name).unlink(missing_ok=True)
async def currTime(response: Response):
    """Return the current Unix timestamp (whole seconds) as plain text."""
    response.headers['Content-Type'] = 'text/plain'
    now_seconds = int(time.time())
    return now_seconds
def options(response: Response):
    """Answer a CORS preflight: allow the local dev origin and the supported verbs."""
    cors_headers = {
        "Access-Control-Allow-Origin": "http://127.0.0.1:8001",
        "Access-Control-Allow-Methods": "GET,PUT,OPTIONS",
    }
    for header_name, header_value in cors_headers.items():
        response.headers[header_name] = header_value