def handle_repodata_package(
    channel,
    files_metadata,
    dao,
    auth,
    force,
    pkgstore,
    config,
):
    """Validate, upload, and register a batch of conda package files.

    For each ``(file, package_name, metadata)`` triple: checks the filename
    format, enforces per-package upload permissions and channel size limits,
    uploads the file to the package store on a thread pool (with retries),
    then records each version in the database and fires the
    ``post_add_package_version`` plugin hook.

    Raises:
        HTTPException: 400 when a filename is not ``name-version-build`` shaped.
    """
    # Imported lazily to avoid a circular import with quetz.main.
    from quetz.main import pm
    channel_name = channel.name
    proxylist = channel.load_channel_metadata().get('proxylist', [])
    user_id = auth.assert_user()

    # First pass: authorization checks and total-size accounting only.
    total_size = 0
    for file, package_name, metadata in files_metadata:
        # Conda filenames are "<name>-<version>-<build>"; rsplit keeps
        # dashes inside the package name intact.
        parts = file.filename.rsplit("-", 2)
        if len(parts) != 3:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"package file name has wrong format {file.filename}",
            )
        else:
            package_name = parts[0]
        auth.assert_upload_file(channel_name, package_name)
        if force:
            auth.assert_overwrite_package_version(channel_name, package_name)
        # workaround for https://github.com/python/cpython/pull/3249
        if type(file.file) is SpooledTemporaryFile and not hasattr(
                file, "seekable"):
            file.file.seekable = file.file._file.seekable
        # Measure the stream by seeking to its end, then rewind for upload.
        file.file.seek(0, os.SEEK_END)
        size = file.file.tell()
        total_size += size
        file.file.seek(0)

    # Enforce the channel quota before any byte is uploaded.
    dao.assert_size_limits(channel_name, total_size)

    @retry(
        stop=stop_after_attempt(3),
        wait=wait_exponential(multiplier=1, min=4, max=10),
        after=after_log(logger, logging.WARNING),
    )
    def _upload_package(file, channel_name, subdir):
        # Push one file into the package store; retried up to 3 times with
        # exponential backoff via tenacity.
        dest = os.path.join(subdir, file.filename)
        try:
            file.file.seek(0)
            logger.debug(
                f"uploading file {dest} from channel {channel_name} to package store"
            )
            pkgstore.add_package(file.file, channel_name, dest)
        except AttributeError as e:
            logger.error(f"Could not upload {file}, {file.filename}. \n{str(e)}")
            raise TryAgain

    pkgstore.create_channel(channel_name)
    nthreads = config.general_package_unpack_threads
    with TicToc("upload file without extracting"):
        with ThreadPoolExecutor(max_workers=nthreads) as executor:
            for file, package_name, metadata in files_metadata:
                if proxylist and package_name in proxylist:
                    # skip packages that should only ever be proxied
                    continue
                subdir = get_subdir_compat(metadata)
                executor.submit(_upload_package, file, channel_name, subdir)

    # Second pass: register versions in the DB and notify plugins.
    with TicToc("add versions to the db"):
        for file, package_name, metadata in files_metadata:
            version = create_version_from_metadata(channel_name, user_id,
                                                   package_name, metadata, dao)
            condainfo = CondaInfo(file.file, package_name, lazy=True)
            pm.hook.post_add_package_version(version=version,
                                             condainfo=condainfo)
            file.file.close()
def read_primer(primer_id: int, db: Session = Depends(get_db)):
    """Fetch a single primer by id; respond 404 when it does not exist."""
    primer = crud.get_primer(db, primer_id=primer_id)
    if primer is None:
        raise HTTPException(status_code=404, detail="Primer not found")
    return primer
def validate_table(table_name):
    """Reject table names that are not in the TABLES whitelist with a 400."""
    if table_name in TABLES:
        return
    raise HTTPException(400, f"Invalid table '{table_name}'")
async def authenticate(token: str):
    """Validate *token* via pypale; raise 401 when it is invalid.

    BUG FIX: removed a stray debug ``print(valid)`` that leaked the
    validation result to stdout on every request.
    """
    if not pypale.valid_token(token):
        raise HTTPException(status_code=401, detail="Forbidden")
def create_user(user: schemas.UserCreate, db: Session = Depends(get_db)):
    """Register a new user; respond 400 if the email is already taken."""
    existing = crud.get_user_by_email(db, email=user.email)
    if existing:
        raise HTTPException(status_code=400, detail="Email already registered")
    return crud.create_user(db=db, user=user)
def psutil_route(req: Psutil_API):
    """Dispatch to the psutil controller; a string result signals an error."""
    result = psutil_template.psutil_controller(req)
    if isinstance(result, str):
        raise HTTPException(400, result)
    return result
def patient_view(id: int):
    """Return the mock-db record for *id*; 400 on non-positive id, 404 if absent."""
    if id < 1:
        raise HTTPException(status_code=400)
    if id not in router.mock_db:
        raise HTTPException(status_code=404)
    return router.mock_db[id]
async def submit_order( order_in: OrderCreate, current_user: MarketUser = Depends(require_active_user) ): """用户下单""" # get_prod_info if order_in.product_type == int(ProductType.qstrategy): product = await QStrategy.get(order_in.product_id) snapshot = QStrategyInfo(**product.__dict__).dict() else: product = await StrategyPackage.get(order_in.product_id) snapshot = PkgInfo(**product.__dict__).dict() if not product: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="没找到对应的商品(策略 / 套餐)", ) # calculate price if product.status != int(ListStatus.online): raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="商品(策略 / 套餐)已下线", ) has_price = False # 订单金额 total_cash = 0 # 订购总时长 total_days = order_in.days + order_in.gift_days # 优惠券抵扣金额 coupon_cash = 0 # 优惠券抵扣时长 coupon_days = 0 # 开启折扣,且订单的赠送时长为 0 # 毫秒时间戳 now = datetime.datetime.now().timestamp() * 1000 if product.enable_discount and order_in.gift_days == 0: for discount_info in product.discount_info: if now < discount_info.start_ts or now > discount_info.end_ts: continue if discount_info.day == order_in.days: total_cash = discount_info.price has_price = True break if not has_price: for price_info in product.period_prices: if price_info.get("day", 0) == order_in.days: # and price_info.get("gift_day", 0) == order_in.gift_days total_cash = price_info.get("price", 0) has_price = True break if not has_price: total_cash = product.buyout_price has_price = True # raise HTTPException( # status_code=status.HTTP_404_NOT_FOUND, detail="未找到商品(策略 / 套餐)针对该时长的价格信息", # ) #if product.allow_coupon and order_in.coupons: # for coupon_id in order_in.coupons: # # TODO: 判断该优惠券是否启用,用户是否拥有该优惠券 # pass #print('进入coupons1111111') # return { # "total_cash": total_cash, # "total_days": total_days, # "coupon_cash": coupon_cash, # "coupon_days": coupon_days, # } expire_dt = datetime.datetime.now() + datetime.timedelta( days=total_days + coupon_days ) #print(expire_dt,'dt-22222222222222222222') 
#print(order_in,'coupons-4444444444444') #print(order_in.dict(),'dict----user_order') order_in_dict = order_in.dict() user_order = await UserOrder.create( **order_in.dict(), #product_id=order_in_dict['product_id'], #product_type=order_in_dict["product_type"], #days=order_in_dict["days"], #gift_days=order_in_dict["gift_days"], #source=order_in_dict["source"], #coupons=order_in_dict["coupons"], #pay_method=order_in_dict["pay_method"], user_id=current_user.id, total_cash=total_cash, total_days=total_days, coupon_days=coupon_days, coupon_cash=coupon_cash, pay_cash=max(0, total_cash - coupon_cash), payed_cash=0, product_snapshot=jsonable_encoder(snapshot), expire_dt=expire_dt, status = 1, create_dt = datetime.datetime.now(), update_dt=datetime.datetime.now(), foreign_order_id='1', pay_id='1', pay_url='1', delete=False, coupon=[] # coupon=[], # TODO: expire_dt 在完成支付时设置 ) #print('进入user_order') #print(user_order,'user_order') #print(user_order.__dict__,'dict--3333333333333') # TODO: get pay url return OrderInfo(**user_order.__dict__["__values__"])
async def get_token_header(x_token: str = Header(...)):
    """Require the hard-coded X-Token header; respond 400 otherwise."""
    if x_token == "fake-super-secret-token":
        return
    raise HTTPException(
        status_code=status.HTTP_400_BAD_REQUEST,
        detail="X-Token header invalid",
    )
async def get_countries(index_id: CountryBoolIn):
    """Return the country list for the given index; 404 when missing.

    Fix: compare against None with ``is None`` (identity), not ``== None``;
    drop the redundant ``else`` after ``raise``.
    """
    country_in_db = get_list(index_id.index_id)
    if country_in_db is None:
        raise HTTPException(status_code=404, detail=message(2, "paises"))
    return country_in_db
async def get_list_person():
    """Return the person list; 404 when the lookup yields nothing.

    Fix: compare against None with ``is None`` (identity), not ``== None``;
    drop the redundant ``else`` after ``raise``.
    """
    person_in_db = get_person_list()
    if person_in_db is None:
        raise HTTPException(status_code=404, detail=message(2, "usuarios"))
    return person_in_db
def delete_user(user_id: int, db: Session = Depends(get_db)):
    """Delete a user by id, returning the deleted row; 404 if absent."""
    deleted = crud.delete_user(db, user_id=user_id)
    if deleted is None:
        raise HTTPException(status_code=404, detail="User not found")
    return deleted
def delete_item(item_id: int, db: Session = Depends(get_db)):
    """Delete an item by id, returning the deleted row; 404 if absent.

    BUG FIX: the 404 detail previously said "User not found" — a copy-paste
    from the user endpoint; it now correctly reports a missing item.
    """
    db_item = crud.delete_item(db, item_id=item_id)
    if db_item is None:
        raise HTTPException(status_code=404, detail="Item not found")
    return db_item
async def fetch_static_data(city: str, statecode: str):
    """
    Static city-level data for 135 US cities. Dataset compiled of rental price estimates,
    walkscores, population, and most prevelant job industry for each city. 📈

    ## Path Parameters
    `city`: The name of a U.S. city; e.g. `Atlanta` or `Los Angeles`

    `statecode`: The [USPS 2 letter abbreviation](https://en.wikipedia.org/wiki/List_of_U.S._state_and_territory_abbreviations#Table)
    (case insensitive) for any of the 50 states or the District of Columbia.

    ## Response
    JSON string of various city statistics for 135 US Cities.
    """
    query = """
    SELECT * FROM static
    """
    columns = [
        "city", "state", "studio", "onebr", "twobr", "threebr", "fourbr",
        "walkscore", "population", "occ_title", "hourly_wage", "annual_wage",
        "climate_zone", "simple_climate"
    ]
    df = pd.read_json(fetch_query(query, columns))

    # Input sanitization
    city = city.title()
    # FIX: was `statecode.lower().upper()` — the `.lower()` was a no-op.
    statecode = statecode.upper()

    # Handle edge cases in city naming conventions:
    # saint
    if city.startswith("Saint"):
        city = city.replace("Saint", "St.")
    elif city.startswith("St "):
        city = city.replace("St", "St.")
    # fort
    elif city.startswith("Ft "):
        city = city.replace("Ft", "Fort")
    elif city.startswith("Ft."):
        city = city.replace("Ft.", "Fort")
    # multiple caps (e.g. "Mckinney" -> "McKinney")
    elif city.startswith("Mc"):
        city = city[:2] + city[2:].capitalize()

    # Find matching metro-area in database
    match = df.loc[(df.city.str.contains(city))
                   & (df.state.str.contains(statecode))]

    # Raise HTTPException for unknown inputs
    if len(match) < 1:
        raise HTTPException(
            status_code=404,
            detail=
            f'{city}, {statecode} not found or lacked enough data to be included here!'
        )

    # DF to dictionary pairs
    pairs = match.to_json(orient='records')
    return pairs
def wmi_route(req: WMI_API):
    """Dispatch to the WMI controller; a string result signals an error."""
    result = wmi_template.wmi_controller(req)
    if isinstance(result, str):
        raise HTTPException(400, result)
    return result
def read_routing_group(routing_group_id: int, db: Session = Depends(get_db)):
    """Fetch a routing group by id; respond 404 when it does not exist."""
    group = service.get_routing_group(db, routing_group_id=routing_group_id)
    if group is None:
        raise HTTPException(status_code=404, detail="RoutingGroup not found")
    return group
def win32_route(req: Win32_API):
    """Dispatch to the Win32 controller; a string result signals an error."""
    result = win32_template.win32_controller(req)
    if isinstance(result, str):
        raise HTTPException(400, result)
    return result
def get_task_status(job_id: int = Query(..., gt=0)) -> Optional[str]:
    """Return the status value for task *job_id*; 404 when unknown.

    Fix: the local variable was named ``status``, shadowing the module-level
    ``status`` (fastapi.status) name used by sibling handlers.
    """
    task_status = crud.get_task_status(job_id)
    if not task_status:
        raise HTTPException(status_code=404, detail='Task not found')
    return task_status.value
async def create_tenant(tenant: Tenant, request: Request):
    """Provision a new tenant.

    Creates a dedicated GitHub repo from the template repo, injects AWS
    credentials as repo secrets, renders deployment/service manifests per
    tier, adds an ArgoCD application (AWS) or Config Sync manifest (other
    providers) plus a deploy GitHub Action, and persists the tenant record.
    Responds 201 with the tenant; 401 when token/scope validation fails.
    If a tenant with the same email already exists, returns it unchanged.
    """
    logging.debug(tenant)
    logging.debug(request.headers)
    # User must have write:tenant scope to create tenants
    # NOTE(review): prefer `not validate_token_and_scopes(...)` over `== False`.
    if validate_token_and_scopes(request, "write:tenant") == False:
        raise HTTPException(
            status_code=401,
            detail=
            "token and scope validation failed. user is not permitted for this action"
        )
    # Check if tenant exists to prevent accidental creation
    results = tenants.search(where("email") == tenant.email)
    if len(results) > 0:
        tenant = results[0]
        return JSONResponse(status_code=status.HTTP_201_CREATED,
                            content=tenant)
    # Fill in defaults when not supplied by the caller.
    # NOTE(review): prefer `is None` over `== None` in the three checks below.
    # TODO Add GCP support
    logging.debug("create_tenant enter " + str(tenant))
    if tenant.namespace == None:
        tenant.namespace = pydng.generate_name().replace("_", "-")
    if tenant.created_time == None:
        tenant.created_time = str(datetime.datetime.now())
    if tenant.cloud_provider == None:
        tenant.cloud_provider = "AWS"
    logging.debug("create_tenant overrides " + str(tenant))
    # Each tenant gets their own repo
    repo_name = "saas-tenant-" + tenant.namespace
    # Create tenant repo
    logging.debug("creating_tenant_repo " + repo_name)
    tenant_repo_obj = github_user.create_repo(repo_name)
    # Add github secrets to repo
    logging.debug("create_tenant add_secrets " + repo_name)
    gh_add_secret(env_github_user, repo_name, env_github_token,
                  "AWS_ACCESS_KEY_ID", env_aws_access_key_id)
    gh_add_secret(env_github_user, repo_name, env_github_token,
                  "AWS_SECRET_ACCESS_KEY", env_aws_secret_access_key)
    # Access the tenant template repo
    logging.debug("create_tenant copy_template")
    templated_repo_obj = github_user.get_repo(env_github_templated_repo)
    # Create contents of tenant repo from template repo contents
    #TODO Use kustomize to build base and overlays
    #kust_template = Template(templated_repo_obj.get_contents("/tier-customization/kustomization.yaml").decoded_content.decode('ascii'))
    #kust_spec = kust_template.substitute(tenantId=tenant.namespace, tier=tenant.tier)
    #tenant_repo_obj.create_file("kubernetes/kustomization.yaml", "creating tenant tier", kust_spec.encode('ascii'))
    # for repo_file in templated_repo_obj.get_contents("/tier"):
    #     tenant_repo_obj.create_file("kubernetes/" + repo_file.name, "creating tenant", repo_file.decoded_content)
    logging.debug("create_tenant prepare_tenant")
    # Add deployment manifest with tier-specific resource requests/limits.
    deployment_template = Template(
        templated_repo_obj.get_contents(
            "/tier/deployment.yaml").decoded_content.decode('ascii'))
    tier = tenant.tier.lower()
    deployment_spec = deployment_template.substitute(
        tenantId=tenant.namespace,
        reqCpu=tier_reqs[tier]["cpu"],
        reqMem=tier_reqs[tier]["mem"],
        limCpu=tier_limits[tier]["cpu"],
        limMem=tier_limits[tier]["mem"])
    tenant_repo_obj.create_file("tier/deployment.yaml",
                                "creating tenant deployment",
                                deployment_spec.encode('ascii'))
    # Add service
    service_template = Template(
        templated_repo_obj.get_contents(
            "/tier/service.yaml").decoded_content.decode('ascii'))
    service_spec = service_template.substitute(tenantId=tenant.namespace)
    tenant_repo_obj.create_file("tier/service.yaml", "creating tenant service",
                                service_spec.encode('ascii'))
    # Provider-specific GitOps wiring: ArgoCD app for AWS, Config Sync otherwise.
    logging.debug("create_tenant prepare_argo_app")
    gh_action_file = None
    if tenant.cloud_provider == "AWS":
        argocd_app_spec_template = Template(
            templated_repo_obj.get_contents(
                "/tier-customization/application.yaml").decoded_content.decode(
                    'ascii'))
        argocd_app_spec = argocd_app_spec_template.substitute(
            tenantId=tenant.namespace, repo="saas-tenant-" + tenant.namespace)
        tenant_repo_obj.create_file("application.yaml", "creating tenant app",
                                    argocd_app_spec.encode('ascii'))
        gh_action_file = templated_repo_obj.get_contents(
            "/tier-customization/deploy-argocd-app.yaml")
    else:
        config_sync_spec_template = Template(
            templated_repo_obj.get_contents(
                "/tier-customization/config_sync.yaml").decoded_content.decode(
                    'ascii'))
        config_sync_spec = config_sync_spec_template.substitute(
            tenantId=tenant.namespace, repo="saas-tenant-" + tenant.namespace)
        tenant_repo_obj.create_file("config_sync.yaml", "creating tenant app",
                                    config_sync_spec.encode('ascii'))
        gh_action_file = templated_repo_obj.get_contents(
            "/tier-customization/deploy-config-sync.yaml")
    # NOTE(review): deploy.yaml is created TWICE below on the same path; the
    # second create_file against an existing path will fail with the GitHub
    # API — confirm and delete one of the two calls.
    tenant_repo_obj.create_file(
        ".github/workflows/deploy.yaml", "adding github action",
        gh_action_file.decoded_content.decode('ascii'))
    # Copy the github action LAST.
    tenant_repo_obj.create_file(".github/workflows/deploy.yaml",
                                "adding github action",
                                gh_action_file.decoded_content.decode('ascii'))
    # Save tenant info
    logging.debug("create_tenant save_tenant")
    tenant.tenant_url = tenant.namespace + ".saas-tenant.cloud"
    logging.debug("create_tenant dump_tenant_obj " + str(tenant))
    tenants.insert(tenant.dict())
    return JSONResponse(status_code=status.HTTP_201_CREATED,
                        content=tenant.json())
import secrets from fastapi import Depends, HTTPException from sqlalchemy.orm import Session as DbSession from starlette.status import HTTP_401_UNAUTHORIZED from . import crud from .config import SESSION_DB_TOKEN_KEY from .core import oauth2_scheme, pwd_context from .db import get_db from .models import Account from .session_db import get_session_db CREDENTIALS_EXCEPTION = HTTPException( status_code=HTTP_401_UNAUTHORIZED, detail="Could not validate credentials", headers={"WWW-Authenticate": "Bearer"}, ) LOGIN_EXCEPTION = HTTPException( status_code=HTTP_401_UNAUTHORIZED, detail="Incorrect username or password", headers={"WWW-Authenticate": "Bearer"}, ) def hash_token(token, secret_key): return hashlib.sha256(secret_key + token).hexdigest() def authenticate_account(db: DbSession, email: str, password: str):
def get_search( self, collections: Optional[List[str]] = None, ids: Optional[List[str]] = None, bbox: Optional[List[NumType]] = None, datetime: Optional[Union[str, datetime]] = None, limit: Optional[int] = 10, query: Optional[str] = None, token: Optional[str] = None, fields: Optional[List[str]] = None, sortby: Optional[str] = None, **kwargs, ) -> ItemCollection: """GET search catalog.""" # Parse request parameters base_args = { "collections": collections, "ids": ids, "bbox": bbox, "limit": limit, "token": token, "query": json.loads(query) if query else query, } if datetime: base_args["datetime"] = datetime if sortby: # https://github.com/radiantearth/stac-spec/tree/master/api-spec/extensions/sort#http-get-or-post-form sort_param = [] for sort in sortby: sort_param.append({ "field": sort[1:], "direction": "asc" if sort[0] == "+" else "desc", }) base_args["sortby"] = sort_param if fields: includes = set() excludes = set() for field in fields: if field[0] == "-": excludes.add(field[1:]) elif field[0] == "+": includes.add(field[1:]) else: includes.add(field) base_args["fields"] = {"include": includes, "exclude": excludes} # Do the request try: search_request = self.post_request_model(**base_args) except ValidationError: raise HTTPException(status_code=400, detail="Invalid parameters provided") resp = self.post_search(search_request, request=kwargs["request"]) # Pagination page_links = [] for link in resp["links"]: if link["rel"] == Relations.next or link[ "rel"] == Relations.previous: query_params = dict(kwargs["request"].query_params) if link["body"] and link["merge"]: query_params.update(link["body"]) link["method"] = "GET" link["href"] = f"{link['body']}?{urlencode(query_params)}" link["body"] = None link["merge"] = False page_links.append(link) else: page_links.append(link) resp["links"] = page_links return resp
def probe() -> str:
    """Always reject the probe with HTTP 401."""
    raise HTTPException(401)
async def verify_token(x_token: str = Header(...)):
    """When SECRET_TOKEN is configured, require the X-Token header to match."""
    if not SECRET_TOKEN:
        return
    if x_token != SECRET_TOKEN:
        raise HTTPException(status_code=400, detail="Invalid X-Token header")
async def middleware(_ctx: JsonRpcContext): raise HTTPException(401) # noinspection PyUnreachableCode yield
def create_primer(primer: schemas.PrimerCreate, db: Session = Depends(get_db)):
    """Create a primer; respond 400 if the name is already registered."""
    existing = crud.get_primer_by_name(db, name=primer.name)
    if existing:
        raise HTTPException(status_code=400,
                            detail="Primer name already registered")
    return crud.create_primer(db=db, primer=primer)
async def middleware(_ctx: JsonRpcContext):
    # Let the handler run first, then fail the request on the way out —
    # exercises post-handler middleware error paths.
    yield
    raise HTTPException(401)
def log_and_raise(status=HTTPStatus.BAD_REQUEST, **kw):
    """Log the keyword arguments at ERROR level, then raise them as the
    detail of an HTTPException with the given status (default 400)."""
    logger.error(str(kw))
    raise HTTPException(status_code=status, detail=kw)
async def get_current_active_user(
        current_user: User = Depends(get_current_user)):
    """Pass the authenticated user through, rejecting disabled accounts."""
    if not current_user.disabled:
        return current_user
    raise HTTPException(status_code=400, detail="Inactive user")
def query(obj: Query = Body(..., example=KG_ONEHOP_EXAMPLE), ) -> Dict:
    """Solve a one-hop TRAPI query.

    Requires a 2-node / 1-edge query graph and the single 'lookup' workflow.
    Answers correlation predicates by looking up feature pairs in the
    module-level ``correlations`` table; unsupported predicates yield an
    empty knowledge graph rather than an error.
    """
    # Only the trivial single-step "lookup" workflow is supported.
    if obj.get("workflow", [{"id": "lookup"}]) != [{"id": "lookup"}]:
        raise HTTPException(
            400, "The only supported workflow is a single 'lookup' operation")
    # Work on a copy so the caller's query graph is returned unmodified.
    qgraph = copy.deepcopy(obj["message"]["query_graph"])
    normalize_qgraph(qgraph)
    if len(qgraph["nodes"]) != 2:
        raise NotImplementedError("Number of nodes in query graph must be 2")
    if len(qgraph["edges"]) != 1:
        raise NotImplementedError("Number of edges in query graph must be 1")
    qedge_id, qedge = next(iter(qgraph["edges"].items()))
    # Unsupported predicate: return an empty (but well-formed) response.
    if ("biolink:correlated_with" not in qedge["predicates"]
            and "biolink:has_real_world_evidence_of_association_with"
            not in qedge["predicates"]):
        return {
            "message": {
                "query_graph": qgraph,
                "knowledge_graph": {
                    "nodes": {},
                    "edges": {}
                },
                "results": [],
            }
        }
    source_qid = qedge["subject"]
    source_qnode = qgraph["nodes"][source_qid]
    target_qid = qedge["object"]
    target_qnode = qgraph["nodes"][target_qid]
    # features = correlations[0]
    source_features = features_from_node(source_qnode)
    target_features = features_from_node(target_qnode)
    # Candidate pairs: the cross product of source/target features, each
    # sorted so lookups into `correlations` are order-independent.
    kedge_pairs = [
        tuple(sorted([source_feature, target_feature]))
        for source_feature in source_features
        for target_feature in target_features
    ]
    kgraph = {
        "nodes": {},
        "edges": {},
    }
    results = []
    for pair in kedge_pairs:
        if pair not in correlations:
            continue
        p_value = correlations[pair]
        source_feature, target_feature = pair
        # note the source and target may be flipped, which is okay
        source_kid, source_knode = knode(source_feature)
        target_kid, target_knode = knode(target_feature)
        kgraph["nodes"].update({
            source_kid: source_knode,
            target_kid: target_knode,
        })
        kedges = knowledgegraph.knowledge_graph_edges(source_kid,
                                                      target_kid,
                                                      p_value=p_value)
        kgraph["edges"].update(kedges)
        results.append({
            "node_bindings": {
                source_qid: [{
                    "id": source_kid
                }],
                target_qid: [{
                    "id": target_kid
                }],
            },
            "edge_bindings": {
                qedge_id: [{
                    "id": kedge_id,
                } for kedge_id in kedges]
            },
            "score": p_value,
            "score_name": "p value"
        })
    return {
        "message": {
            "query_graph": obj["message"]["query_graph"],  # Return unmodified
            "knowledge_graph": kgraph,
            "results": results,
        },
        "workflow": [
            {
                "id": "lookup"
            },
        ],
    }
async def read_items(authorization: Optional[str] = Header(None)):
    """Require the Authorization header to carry the 'access_as_user' scope."""
    error, req = checkAuthorization(token=authorization, scope='access_as_user')
    if not error:
        return {"authorization": authorization}
    raise HTTPException(status_code=req['status'], detail=req['error'])