async def forward_message(
    token: str = TOKEN_VALIDATION,
    chat_id: Union[int, str] = Query(..., description='Unique identifier for the target chat or username of the target channel (in the format @channelusername)'),
    from_chat_id: Union[int, str] = Query(..., description='Unique identifier for the chat where the original message was sent (or channel username in the format @channelusername)'),
    message_id: int = Query(..., description='Message identifier in the chat specified in from_chat_id'),
    disable_notification: Optional[bool] = Query(None, description='Sends the message silently. Users will receive a notification with no sound.'),
) -> JSONableResponse:
    """
    Use this method to forward messages of any kind. On success, the sent Message is returned.

    https://core.telegram.org/bots/api#forwardmessage
    """
    from ....main import _get_bot

    bot = await _get_bot(token)

    # Resolve the destination chat; get_entity raises ValueError for unknown ids.
    try:
        destination = await get_entity(bot, chat_id)
    except ValueError:
        raise HTTPException(404, detail="chat not found?")

    # NOTE(review): from_chat_id is handed to Telethon unresolved (no
    # get_entity call like the destination above) — confirm Telethon accepts
    # raw ids / @usernames for from_peer.
    forwarded = await bot.forward_messages(
        entity=destination,
        messages=[InputMessageID(id=message_id)],
        from_peer=from_chat_id,
        silent=disable_notification,
    )
    payload = await to_web_api(forwarded, bot)
    return r_success(payload.to_array())
async def send_chat_action(
    token: str = TOKEN_VALIDATION,
    chat_id: Union[int, str] = Query(..., description="Unique identifier for the target chat or username of the target channel (in the format @channelusername)", regex=r"@[a-zA-Z][a-zA-Z0-9_]{2,}"),
    action: ChatAction = Query(..., description='Type of action to broadcast. Choose one, depending on what the user is about to receive: "typing" for text messages, "upload_photo" for photos, "record_video" or "upload_video" for "videos", "record_audio" or "upload_audio" for audio files, "upload_document" for general files, "find_location" for location data, "record_video_note" or "upload_video_note" for video notes. Additionally added by this API implementation are "play_game", "choose_contact" and "cancel".'),
):
    """Broadcast a chat action (e.g. "typing") to the given chat.

    NOTE(review): the regex constraint on chat_id only matches @usernames,
    which would reject plain integer chat ids despite the Union type —
    confirm intended.
    """
    from ....main import _get_bot

    bot = await _get_bot(token)
    try:
        peer = await get_entity(bot, chat_id)
    except ValueError:
        raise HTTPException(404, detail="chat not found?")
    # end try

    # Two-step translation: web-API action name -> Telethon action string
    # -> Telethon SendMessageAction instance.
    telethon_name: str = actions_api_to_telethon_mapping[action]
    # noinspection PyProtectedMember
    telethon_action: TypeSendMessageAction = _ChatAction._str_mapping[telethon_name.lower()]

    await bot(
        request=SetTypingRequest(
            peer=peer,
            action=telethon_action,
        )
    )
    return r_success()
async def get_uniprot(
    qualifier: Any = Path(
        ..., description="UniProtKB accession number (AC) or entry name (ID)"
    ),
    provider: Optional[Any] = Query(
        None, enum=["swissmodel", "genome3d", "foldx", "pdb"]
    ),
    template: Optional[Any] = Query(
        None,
        description="Template is 4 letter PDB code, or 4 letter code with "
        "assembly ID and chain for SMTL entries",
    ),
    res_range: Optional[Any] = Query(
        None,
        description="Specify a UniProt sequence residue range",
        pattern="^[0-9]+-[0-9]+$",
        alias="range",
    ),
):
    """Returns experimental and theoretical models for a UniProt accession or
    entry name

    Args:
        qualifier (str): UniProtKB accession number (AC) or entry name (ID).
        provider (str, optional): Data provider
        template (str, optional): 4 letter PDB code, or 4 letter code with
            assembly ID and chain for SMTL entries
        res_range (str, optional): Residue range

    Returns:
        Result: A Result object with experimental and theoretical models,
        or an empty 404 JSONResponse when no provider returned data.
    """
    # Build one request URL per registered provider service.
    calls = []
    for service in get_services(service_type="uniprot", provider=provider):
        url = get_base_service_url(service["provider"]) + service["accessPoint"] + f"{qualifier}.json?"
        if res_range:
            url = f"{url}range={res_range}"
        calls.append(url)

    responses = await send_async_requests(calls)
    payloads = [r.json() for r in responses if r and r.status_code == 200]
    if not payloads:
        return JSONResponse(content={}, status_code=status.HTTP_404_NOT_FOUND)

    # The uniprot_entry is identical across providers; merge all structures.
    entry: UniProtEntry = UniProtEntry(**payloads[0]["uniprot_entry"])
    structures: List[Structure] = [
        structure for payload in payloads for structure in payload["structures"]
    ]
    return Result(**{"uniprot_entry": entry, "structures": structures})
async def get_courses(
    semester_id: str = Path(
        None,
        example="202101",
        description="The id of the semester, determined by the Registrar.",
    ),
    include_sections: bool = Query(
        False, description="Populate `sections` for each course."),
    include_periods: bool = Query(
        True, description="`NOT YET IMPLEMENTED` Populate `periods` of each section (only checked if `include_sections` is True)"),
    title: Optional[str] = Query(None, description="`NOT YET IMPLEMENTED`"),
    days: Optional[List[str]] = Query(
        None, description="`NOT YET IMPLEMENTED`"),
    subject_prefix: Optional[str] = Query(
        None, description="`NOT YET IMPLEMENTED`"),
    number: Optional[str] = Query(None, description="`NOT YET IMPLEMENTED`"),
    limit: int = Query(
        10,
        description="The maximum number of course sections to return in the response. Max: 50",
        gt=0,
        lt=51,
    ),
    offset: int = Query(
        0, description="The number of course sections in the response to skip."
    ),
    conn: RealDictConnection = Depends(postgres_pool.get_conn)
):
    """Fetch a page of courses for a semester, optionally populating sections.

    Only `semester_id`, `include_sections`, `limit` and `offset` are honored
    by the body; the remaining filters are declared but not yet implemented.
    """
    courses = fetch_courses_without_sections(conn, semester_id, limit, offset)
    if include_sections:
        # NOTE(review): the 4th argument passed is include_sections, which is
        # always True on this branch; the signature suggests include_periods
        # was intended — confirm against populate_course_periods.
        populate_course_periods(conn, semester_id, courses, include_sections)
    return courses
def for_two_page(
    keyword: str = Query('600050'),
    session: Session = Depends(get_db_session),
):
    """Return the daily OHLCV history for a stock as candlestick rows.

    Each row is ``[trade_date, open, close, low, high, volume]`` (note the
    open/close-before-low/high order expected by the charting frontend),
    ordered by date ascending.
    """
    from sqlalchemy import text

    key = ts_code_or_name(keyword)
    # SECURITY FIX: `keyword` is user input; the previous string-concatenated
    # SQL was an injection vector. Bind the code as a parameter instead.
    result1 = session.execute(
        text(
            "select `trade_date` as date, open, high, low, close, vol "
            "from `tb_daily` where `ts_code` = :code order by `date` asc;"
        ),
        {"code": key},
    )
    # Reorder each DB row (date, open, high, low, close, vol) into the
    # frontend's (date, open, close, low, high, vol) layout.
    return [
        [trade_date, open_, close, low, high, vol]
        for trade_date, open_, high, low, close, vol in result1.fetchall()
    ]
def search(
    *,
    common: dict = Depends(common_parameters),
    type: List[SearchTypes] = Query(..., alias="type[]"),
):
    """Perform a search."""
    query_str = common["query_str"]
    if query_str:
        results = composite_search(
            db_session=common["db_session"],
            query_str=query_str,
            models=[get_class_by_tablename(t) for t in type],
            current_user=common["current_user"],
        )
        # add a filter for restricted incidents
        # TODO won't currently show incidents that you are a member
        admin_projects = [
            p for p in common["current_user"].projects if p.role == UserRoles.admin
        ]
        results["Incident"] = [
            incident
            for incident in results["Incident"]
            if incident.project in admin_projects
            or incident.visibility == Visibility.open
        ]
    else:
        results = []
    return SearchResponse(
        **{"query": query_str, "results": results}
    ).dict(by_alias=False)
def for_three_page(
    keyword: str = Query('600050'),
    session: Session = Depends(get_db_session),
):
    """Return daily-basic indicator columns for a stock on one trade date.

    Returns a dict of parallel lists: volume ratio ('vr'), PE ('pe'),
    PB ('pb'), total share ('ts') and total market value ('tm').
    """
    from sqlalchemy import text

    key = ts_code_or_name(keyword)
    # TODO: trade date is hard-coded; restore function.getYesterday().
    todaytime = '20200102'
    # SECURITY FIX: `keyword` is user input; bind both values as parameters
    # instead of concatenating them into the SQL string (injection vector).
    result1 = session.execute(
        text(
            "select `volume_ratio`, `pe`, `pb`, `total_share`, `total_mv` "
            "from `tb_daily_basic` where `ts_code` = :code and trade_date = :day;"
        ),
        {"code": key, "day": todaytime},
    )
    vr, pe, pb, ts, tm = [], [], [], [], []
    for data_vr, data_pe, data_pb, data_ts, data_tm in result1.fetchall():
        vr.append(data_vr)
        pe.append(data_pe)
        pb.append(data_pb)
        ts.append(data_ts)
        tm.append(data_tm)
    return {'vr': vr, 'pe': pe, 'pb': pb, 'ts': ts, 'tm': tm}
def _format_applications(raw: str) -> str:
    """Parse the raw applications payload (a JSON list rendered as text,
    stripped of its surrounding brackets) and join the non-empty entries
    with ';'. Raises on malformed input."""
    entries = list(dict(json.loads(str(raw)[1:-1])).items())
    return ';'.join([str(e) for e in entries if not e[1] == []])


async def form_get_post(request: Request, phone: Optional[str] = Query(None, max_length=50)):
    """Render form.html with the current application/channel listing.

    :param request: incoming request (required by the template engine)
    :param phone: optional phone filter (currently unused in the body)
    :return: TemplateResponse for 'form.html' with the joined listing in 'result'
    """
    # Fallback payload used when the upstream response is empty or unparsable;
    # the same string previous code duplicated in three places.
    fallback = (200, '[{"name":"None","channel_ids":["None"]}]')

    req_text = await check_applications(env)
    if len(req_text[1]) > 2:
        try:
            rq = _format_applications(req_text[1])
        except Exception as e:
            # Best-effort: log and fall back rather than failing the page.
            logger.exception(f"{e}")
            rq = _format_applications(fallback[1])
    else:
        rq = _format_applications(fallback[1])

    logger.info(f"rq: {rq}")
    return templates.TemplateResponse('form.html', context={
        'request': request,
        'result': rq
    })
async def get_individual_metadata_template_for_seqr(
    project: str,
    export_type: FileExtension,
    external_participant_ids: Optional[List[str]] = Query(
        default=None),  # type: ignore[assignment]
    # pylint: disable=invalid-name
    replace_with_participant_external_ids: bool = True,
    connection: Connection = get_project_readonly_connection,
):
    """Get individual metadata template for SEQR as a CSV.

    Builds the template rows via the participant layer, serializes them with
    the delimiter implied by `export_type`, and streams the file back with a
    date-stamped filename.
    """
    participant_layer = ParticipantLayer(connection)
    assert connection.project
    rows = await participant_layer.get_seqr_individual_template(
        project=connection.project,
        external_participant_ids=external_participant_ids,
        replace_with_participant_external_ids=replace_with_participant_external_ids,
    )

    output = io.StringIO()
    writer = csv.writer(output, delimiter=export_type.get_delimiter())
    writer.writerows(rows)

    basefn = f'{project}-{date.today().isoformat()}'
    ext = export_type.get_extension()
    # BUG FIX: iter() over the string yielded ONE CHARACTER per chunk, so the
    # response was streamed a byte at a time. Wrap the payload in a list so
    # the whole body goes out as a single chunk.
    return StreamingResponse(
        iter([output.getvalue()]),
        media_type=export_type.get_mime_type(),
        headers={'Content-Disposition': f'filename={basefn}{ext}'},
    )
def read_users(
    *,
    session: Session = Depends(get_session),
    offset: int = 0,
    limit: int = Query(default=100, le=100),
):
    """List users with pagination.

    BUG FIX: the constraint keyword was `lte`, which FastAPI's Query does not
    recognize (the valid keyword is `le`), so the intended 100-row cap was
    never enforced.
    """
    users = session.exec(select(User).offset(offset).limit(limit)).all()
    return users
def read_topics(
    *,
    session: Session = Depends(get_session),
    offset: int = 0,
    limit: int = Query(default=100, le=100),
):
    """List topics with pagination.

    BUG FIX: `lte` is not a valid Query constraint keyword (FastAPI uses
    `le`), so the 100-row cap was never enforced.
    """
    topics = session.exec(select(Topic).offset(offset).limit(limit)).all()
    return topics
async def get_token(
    websocket: WebSocket,
    token: Any = Query(None),
) -> Optional[str]:
    """Validate the query-string token for a websocket connection.

    Returns the token unchanged when valid; otherwise closes the socket with
    a policy-violation code and returns None.
    """
    logger.info("TOKEN:" + str(token))
    if token is not None and userService.check_token(token):
        return token
    await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
    return None
def post_items_all_params(
    item_id: str = Path(...),
    body: Item = Body(...),
    query_a: int = Query(None),
    query_b=Query(None),
    coo: str = Cookie(None),
    x_head: int = Header(None),
    x_under: str = Header(None, convert_underscores=False),
):
    """Echo back every supported parameter source as one dict."""
    echoed = dict(
        item_id=item_id,
        body=body,
        query_a=query_a,
        query_b=query_b,
        coo=coo,
        x_head=x_head,
        x_under=x_under,
    )
    return echoed
def read_courses(
    *,
    session: Session = Depends(get_session),
    offset: int = 0,
    limit: int = Query(default=100, le=100),
    current_user: User = Depends(deps.get_current_user),
):
    """List the current user's courses with pagination.

    BUG FIX: `lte` is not a valid Query constraint keyword (FastAPI uses
    `le`), so the 100-row cap was never enforced.
    """
    courses = session.exec(
        select(Course).filter(Course.user_id == current_user.id).offset(
            offset).limit(limit)).all()
    return courses
def read_assignments(
    *,
    session: Session = Depends(get_session),
    course_id: int,
    offset: int = 0,
    limit: int = Query(default=100, le=100),
    current_user: User = Depends(deps.get_current_user),
):
    """List assignments of a course with pagination.

    BUG FIX: `lte` is not a valid Query constraint keyword (FastAPI uses
    `le`), so the 100-row cap was never enforced.

    NOTE(review): unlike read_courses, this does not verify that `course_id`
    belongs to `current_user` — confirm whether that authorization check is
    done elsewhere.
    """
    assignments = session.exec(
        select(Assignment).where(Assignment.course_id == course_id).offset(
            offset).limit(limit)).all()
    return assignments
def for_five_page(
    keyword: str = Query('600050')
):
    """Look up public-opinion data for the stock behind *keyword*."""
    key = ts_code_or_name(keyword)
    # Extract the numeric code from a value like '600050.SH'.
    # NOTE(review): the '.' in the pattern is unescaped (matches any char)
    # and '\d*' can match empty — confirm the inputs always carry a code.
    re_str = r'(\d*).\D\D'
    matches = findall(re_str, key)
    opinions = reptile.public_opinion(matches[0])
    return {'list': opinions}
async def _ref_ngramm_author_stat(
    topn: Optional[int] = None,
    author: Authors = Query(...),
    ngrammParam: NgrammParam = Depends(depNgrammParamReq),
    _debug_option: Optional[DebugOption] = None,
    slot: Slot = Depends(Slot.req2slot)):
    """Aggregate per-author top-ngramm reference statistics from MongoDB.

    Runs the pipeline built by get_top_ngramm_author_stat over the
    `publications` collection, then post-processes each document: counts
    occurrences per 'atype' (plain, weighted by cnt_tot, and once-per-entry
    by first atype) and attaches percentage breakdowns under the 'cnt' key.

    Debug options short-circuit the handler: 'pipeline' returns the pipeline
    itself, 'raw_out' returns the raw aggregation documents.
    """
    pipeline = get_top_ngramm_author_stat(topn, author, ngrammParam)
    if _debug_option == DebugOption.pipeline:
        return pipeline
    publications: Collection = slot.mdb.publications
    curs = publications.aggregate(pipeline, allowDiskUse=True)
    if _debug_option == DebugOption.raw_out:
        return [cont async for cont in curs]
    out = []
    get_at = itemgetter('atype', 'cnt_tot')
    async for cont in curs:
        atypes = cont.pop('atypes')
        cnt = Counter()      # occurrences of each atype across entries
        cnt_tot = Counter()  # occurrences weighted by each entry's cnt_tot
        cnt_unq = Counter()  # entries counted once, keyed by their first atype
        for atc in atypes:
            ats, cnt_tots = get_at(atc)
            for at in ats:
                cnt[at] += 1
                cnt_tot[at] += cnt_tots
            first_at = ats[0]
            cnt_unq[first_at] += 1
        cnt_all = cont.pop('cnt')
        cnt_all_tot = cont.pop('cnt_tot')
        # Total atype occurrences (entries with several atypes count more
        # than once) — denominator for the *_dbl_proc percentages.
        cnt_dbl_all = sum(cnt.values())
        # NOTE(review): the '_unk' suffix presumably abbreviates "unique"
        # ('_unq'?) — key name kept as-is since it is part of the response.
        cont['cnt'] = dict(all=cnt_all, **cnt,
          **{f'{k}_unk': v for k, v in cnt_unq.items()},
          **{
            f'{k}_proc': round((v / cnt_all) * 100, 3)
            for k, v in cnt.items()
          },
          **{
            f'{k}_dbl_proc': round((v / cnt_dbl_all) * 100, 3)
            for k, v in cnt.items()
          })
        # cont['cnt_tot'] = dict(
        #   all=cnt_all_tot, **cnt_tot,
        #   **{f'{k}_proc': round((v/cnt_all_tot)*100, 3) for k, v in cnt_tot.items()})
        out.append(cont)
    return out
def get_users(customer_id: Optional[List[int]] = Query(None)):
    """Return the RFM segment for each requested customer id.

    Looks the ids up in the RFM dataset and returns parallel lists of the
    requested ids and their segments.
    """
    # ROBUSTNESS FIX: customer_id defaults to None, and pandas'
    # Series.isin(None) raises TypeError (a 500). An absent/empty parameter
    # now yields an empty result instead.
    if not customer_id:
        return {"customer_id": [], "segment": []}
    rfm = load_dataset(dataset_path)
    segments = list(
        rfm[rfm['customer_id'].isin(customer_id)]["segment"].values)
    return {"customer_id": customer_id, "segment": segments}
async def export_to_csv(metadata_field: List[str] = Query(default=['added_by', 'information', 'date']),
                        dialect: str = Query(default='excel'),
                        app: HeksherManagement = application):
    """Export every rule of every setting as a CSV document.

    Columns are: the setting name, one column per context feature, the rule
    value, then the requested metadata fields; unknown metadata keys are
    ignored via extrasaction='ignore'.
    """
    buffer = StringIO(newline='')
    # get all context features
    context_features = await app.heksher_client.get_context_features()
    writer = csv.DictWriter(
        buffer,
        ['setting', *context_features, 'value', *metadata_field],
        dialect=dialect,
        extrasaction='ignore',
    )
    writer.writeheader()
    # get all settings, then all of their rules
    setting_names = await app.heksher_client.get_setting_names()
    rules_by_setting = await app.heksher_client.get_settings_rules(setting_names)
    for setting_name, rules in sorted(rules_by_setting.items()):
        for rule in rules:
            writer.writerow({
                'setting': setting_name,
                'value': rule['value'],
                **rule['metadata'],
                **dict(rule['context_features']),
            })
    return ExportCSVOutput(csv=buffer.getvalue())
async def create_user(no_registration: bool = Query(False),
                      create_data: Dict[str, Any] = Body(...),
                      user: UserInfo = Depends(Authentication())):
    """Creates a new user from the supplied data; callers must be admins."""
    if 'admin' not in user['roles']:
        raise HTTPException(401)
    new_user = DotDict()
    await _update_user(new_user, create_data, is_new=True, is_admin=True,
                       no_registration=no_registration)
async def put_new_person_and_friends(
    response: Response,
    person: str,
    friends: List[str] = Query(["Gustavo"]),
):
    """
    Add new person to network graph
    """
    service = GraphService(g().friends)
    saved = service.save_new_person(person=person, friends=friends)
    if not saved:
        raise HTTPException(HTTP_400_BAD_REQUEST)
    return {"success": saved}
async def get_sections(
    semester_id: str = Path(
        None,
        example="202101",
        description="The id of the semester, determined by the Registrar.",
    ),
    crns: List[CRN] = Query(
        ...,
        description="The direct CRNs of the course sections to fetch.",
        example=["42608"],
    ),
    conn: RealDictConnection = Depends(postgres_pool.get_conn),
):
    """Directly fetch course sections from CRNs."""
    sections = fetch_course_sections(conn, semester_id, crns)
    return sections
async def end_session(
    id_token_hint: Optional[str] = Query(None),
    post_logout_redirect_uri: Optional[str] = Query(None),
    state: Optional[str] = Query(None),
    sid: Optional[str] = Cookie(None, alias=COOKIE_KEY_SID),
    referer: Optional[str] = Header(None),
):
    """Ends the session.

    Deletes the server-side session (keyed by the sid cookie) and the access
    token referenced by id_token_hint, then 302-redirects the client:

    * to post_logout_redirect_uri (propagating `state` as a query parameter
      while preserving any URL fragment), when given;
    * else back to the Referer header, when present;
    * else to '' (a relative redirect).

    The session and state cookies are cleared on the response.
    """
    if sid is not None:
        await async_session_collection.delete_one({'_id': sid})
    if id_token_hint is not None:
        await async_token_collection.delete_one(
            {'access_token': id_token_hint})
    if post_logout_redirect_uri is not None:
        if state is not None:
            # Split off the fragment first: everything after '#' must stay at
            # the very end of the URL, after the appended state parameter.
            if '#' in post_logout_redirect_uri:
                post_logout_redirect_uri, post_logout_redirect_hash = post_logout_redirect_uri.split(
                    '#', 1)
                post_logout_redirect_hash = '#' + post_logout_redirect_hash
            else:
                post_logout_redirect_hash = ''
            # [:-1] ignores a '?' in the final position when deciding between
            # '&' and '?'. NOTE(review): a URI ending in a bare '?' therefore
            # gets a second '?' appended ('x??state=...') — confirm intended.
            if '?' in post_logout_redirect_uri[:-1]:
                post_logout_redirect_uri += '&state=' + state
            else:
                post_logout_redirect_uri += '?state=' + state
            post_logout_redirect_uri += post_logout_redirect_hash
    elif referer is not None:
        post_logout_redirect_uri = referer
    else:
        post_logout_redirect_uri = ''
    response = RedirectResponse(
        status_code=302,
        headers={'Location': post_logout_redirect_uri},
    )
    response.delete_cookie(COOKIE_KEY_SID)
    response.delete_cookie(COOKIE_KEY_STATE)
    return response
async def retrieve_vacancy_collection(
    db: Session = Depends(dependencies.get_db),
    skip: int = Query(0, description="Query offset"),
    limit: int = Query(10, description="Query limit"),
    s: Optional[List[Specialization]] = Query(
        None, description="Vacancy.specializations filter criterion"
    ),
    l: Optional[List[Location]] = Query(
        None, description="Vacancy.locations filter criterion"
    ),
    st: Optional[Salary] = Query(
        None, description="Vacancy.salary_from filter criterion"
    ),
) -> Any:
    """
    Retrieve Vacancy collection
    """
    count, vacancies = crud.vacancy.retrieve_collection(
        db, skip=skip, limit=limit, s=s, l=l, st=st
    )
    return dict(count=count, items=vacancies)
async def root(cube_str: str = Query(..., alias="cube", min_length=54, max_length=54)):
    """Solve a 54-sticker cube string with the Kociemba solver.

    Returns the solved cube, the move list and the solve time, or an
    {'error': ...} payload when construction or solving fails.
    """
    print(cube_str)
    try:
        cube = Cube(cube_str)
    except ValueError as e:
        return {"error": str(e)}
    try:
        solver = KociembaSolver(cube)
        solver.solve()
    except Exception as e:
        return {"error": str(e)}
    return {
        "cube": str(cube),
        "moves": solver.moves,
        "timeToSolve": solver.time_to_solve,
    }
def for_one_page(
    keyword: str = Query('600050'),
    session: Session = Depends(get_db_session),
):
    """Return the daily volume-ratio series for a stock, ordered by date.

    Returns {'dayxData': [dates...], 'dayyData': [volume ratios...]}.
    """
    from sqlalchemy import text

    key = ts_code_or_name(keyword)
    # SECURITY FIX: `keyword` is user input; the previous string-concatenated
    # SQL was an injection vector — bind the code as a parameter instead.
    # (Also removed a leftover debug print and six unused week/month lists.)
    result1 = session.execute(
        text(
            "select `trade_date` as date, volume_ratio from `tb_daily_basic` "
            "where `ts_code` = :code order by `date` asc;"
        ),
        {"code": key},
    )
    day_x_data, day_y_data = [], []
    for trade_date, volume_ratio in result1.fetchall():
        day_x_data.append(trade_date)
        day_y_data.append(volume_ratio)
    return {'dayxData': day_x_data, 'dayyData': day_y_data}
def __init__(
    self,
    grant_type: Optional[str] = Query(None),
    code: Optional[str] = Query(None),
    redirect_uri: Optional[str] = Query(None),
    client_id: Optional[str] = Query(None),
    client_secret: Optional[str] = Query(None),
    code_verifier: Optional[str] = Query(None),
    username: Optional[str] = Query(None),
    password: Optional[str] = Query(None),
):
    """Collect the OAuth2 token-endpoint query parameters onto the instance."""
    self.grant_type, self.code = grant_type, code
    self.redirect_uri, self.client_id = redirect_uri, client_id
    self.client_secret, self.code_verifier = client_secret, code_verifier
    self.username, self.password = username, password
def __init__(
    self,
    response_type: str = Query(...),
    client_id: str = Query(...),
    redirect_uri: Optional[str] = Query(None),
    scope: Optional[str] = Query(None),
    code_challenge: Optional[str] = Query(None),
    code_challenge_method: Optional[str] = Query(None),
    state: Optional[str] = Query(None),
):
    """Collect the OAuth2 authorize-endpoint query parameters onto the instance."""
    self.response_type, self.client_id = response_type, client_id
    self.redirect_uri, self.scope = redirect_uri, scope
    self.code_challenge = code_challenge
    self.code_challenge_method = code_challenge_method
    self.state = state
async def send_message( token: str = TOKEN_VALIDATION, chat_id: Union[int, str] = Query(..., description='Unique identifier for the target chat or username of the target channel (in the format @channelusername)'), text: str = Query(..., description='Text of the message to be sent'), parse_mode: Optional[str] = Query(None, description="Send Markdown or HTML, if you want Telegram apps to show bold, italic, fixed-width text or inline URLs in your bot's message."), disable_web_page_preview: Optional[bool] = Query(None, description='Disables link previews for links in this message'), disable_notification: Optional[bool] = Query(None, description='Sends the message silently. Users will receive a notification with no sound.'), reply_to_message_id: Optional[int] = Query(None, description='If the message is a reply, ID of the original message'), reply_markup: Optional[Union[Json['InlineKeyboardMarkupModel'], Json['ReplyKeyboardMarkupModel'], Json['ReplyKeyboardRemoveModel'], Json['ForceReplyModel']]] = Query(None, description='Additional interface options. A JSON-serialized object for an inline keyboard, custom reply keyboard, instructions to remove reply keyboard or to force a reply from the user.'), ) -> JSONableResponse: """ Use this method to send text messages. On success, the sent Message is returned. 
https://core.telegram.org/bots/api#sendmessage """ # model loading and verification reply_markup: Optional[Union[InlineKeyboardMarkupModel, ReplyKeyboardMarkupModel, ReplyKeyboardRemoveModel, ForceReplyModel]] = parse_obj_as( type_=Optional[Union[InlineKeyboardMarkupModel, ReplyKeyboardMarkupModel, ReplyKeyboardRemoveModel, ForceReplyModel]], obj=reply_markup, ) buttons = await to_telethon(reply_markup, None) from ....main import _get_bot bot = await _get_bot(token) try: entity = await get_entity(bot, chat_id) await bot.get_dialogs() except BotMethodInvalidError: assert isinstance(chat_id, int) or (isinstance(chat_id, str) and len(chat_id) > 0 and chat_id[0] == '@') entity = chat_id except ValueError: raise HTTPException(404, detail="chat not found?") # end try msg = await bot.send_message( entity=entity, message=text, parse_mode=parse_mode, link_preview=not disable_web_page_preview, silent=disable_notification, reply_to=reply_to_message_id, buttons=buttons, ) data = await to_web_api(msg, bot) return r_success(data.to_array())
async def search_sections(
    semester_id: str = Path(
        None,
        example="202101",
        description="The id of the semester, determined by the Registrar.",
    ),
    course_subject_prefix: Optional[str] = Query(None),
    course_number: Optional[str] = Query(None),
    course_title: Optional[str] = Query(None),
    days: Optional[List[str]] = Query(None, title="Meeting days", description="`NOT YET IMPLEMENTED`"),
    has_seats: Optional[bool] = Query(None, title="Has open seats"),
    limit: int = Query(
        10,
        description="The maximum number of course sections to return in the response. Max: 50",
        gt=0,
        lt=51,
    ),
    offset: int = Query(
        0, description="The number of course sections in the response to skip."
    ),
    conn: RealDictConnection = Depends(postgres_pool.get_conn),
):
    """
    Search course sections with different query parameters.
    Always returns a paginated response.
    """
    matches = search_course_sections(
        conn,
        semester_id,
        limit,
        offset,
        course_subject_prefix=course_subject_prefix,
        course_number=course_number,
        course_title=course_title,
        has_seats=has_seats,
    )
    return matches