def get_router(self) -> APIRouter:
    """Build and return an APIRouter wired to this instance's CRUD handlers."""
    router = APIRouter()
    # Table-driven registration keeps the verb/path/handler mapping in one place.
    registrations = (
        (router.get, '/{model_id}', self.read_endpoint),
        (router.get, '/', self.index_endpoint),
        (router.post, '/', self.create_endpoint),
        (router.put, '/', self.update_endpoint),
        (router.delete, '/', self.delete_endpoint),
    )
    for register, route_path, handler in registrations:
        register(route_path)(handler)
    return router
def mount(name: str, router: APIRouter, klass_py, klass_orm, ignored=frozenset()):
    """Mount common CRUD routes for a model pair onto *router*.

    :param name: model name used in error messages
    :param router: APIRouter the routes are attached to
    :param klass_py: Pydantic model class
    :param klass_orm: SQLAlchemy model class
    :param ignored: collection of operation names to skip; recognised values
        are 'create', 'read', 'update', 'delete' and 'list'.
        (Default changed from the mutable ``set()`` to an immutable
        ``frozenset()`` — behaviorally identical, but no longer a shared
        mutable default.)
    """
    def create(model_py: klass_py, sess: Session = Depends(create_session)):
        model_orm = klass_orm(
            **model_py.dict(exclude_none=True, exclude_unset=True))
        sess.add(model_orm)
        # Flush so the database assigns the primary key before serialising back.
        sess.flush([model_orm])
        return klass_py.from_orm(model_orm)

    def get(idx: int, sess: Session = Depends(create_session)):
        model_orm = sess.query(klass_orm).filter(klass_orm.id == idx).one()
        return klass_py.from_orm(model_orm)

    def update(model_py: klass_py, sess: Session = Depends(create_session)):
        if model_py.id is None:
            raise HTTPException(400, f"{name} id is not given")
        model_orm: klass_orm = sess.query(klass_orm).filter(
            klass_orm.id == model_py.id).one()
        for f, v in model_py.dict(exclude_none=True,
                                  exclude_unset=True).items():
            if isinstance(v, dict):
                # nested models are usually mapped to foreign key objects
                continue
            setattr(model_orm, f, v)
        sess.add(model_orm)
        return

    def delete(idx: int, sess: Session = Depends(create_session)):
        model_orm = sess.query(klass_orm).filter(klass_orm.id == idx).one()
        sess.delete(model_orm)
        return klass_py.from_orm(model_orm)

    def list_all(sess: Session = Depends(create_session)):
        return [klass_py.from_orm(u) for u in sess.query(klass_orm).all()]

    # NOTE(review): the verb mapping (PUT for create, POST for update) is the
    # reverse of REST convention but is preserved for backward compatibility
    # with existing callers.
    if 'create' not in ignored:
        router.put('/', response_model=klass_py,
                   status_code=status.HTTP_201_CREATED)(create)
    if 'read' not in ignored:
        router.get('/', response_model=klass_py)(get)
    if 'update' not in ignored:
        router.post('/')(update)
    if 'delete' not in ignored:
        router.delete('/', response_model=klass_py,
                      status_code=status.HTTP_202_ACCEPTED)(delete)
    if 'list' not in ignored:
        router.get('/list', response_model=List[klass_py])(list_all)
def register_view(self,
                  view,
                  prefix=None,
                  methods: Union[List[str], Set[str]] = ('GET', 'Retrieve',
                                                         'POST', 'PUT',
                                                         'DELETE'),
                  tags=None,
                  depends=None):
    """Register a class-based view's handlers under the '/admin' prefix.

    If the custom routing is not wanted, pass ``methods=None`` and the
    view's own ``methods`` attribute is used instead.

    :param view: view instance providing list/retrieve/create/update/delete
    :param prefix: URL prefix; defaults to "/" + the view's class name
    :param methods: iterable of enabled method markers; 'Retrieve' is a
        custom marker meaning "GET by id"
    :param tags: OpenAPI tags; defaults to the prefix without its slash
    :param depends: currently unused — TODO confirm whether it should be wired in
    """
    router = APIRouter()
    if not prefix:
        prefix = "/" + view.__class__.__name__
    if not tags:
        tags = [prefix[1:]]
    if not methods:
        methods = view.methods
    # Use membership tests instead of .count(): the annotation allows Set[str],
    # and sets have no .count() method (the old code raised AttributeError).
    # For lists/tuples, truthiness of count(x) is equivalent to `x in methods`.
    if 'GET' in methods:
        # NOTE: a response model may need to be set here
        # get_res_model = get_res_schema(view.schema)
        router.get(
            prefix,
            tags=tags,
        )(view.list)
    if 'Retrieve' in methods:
        router.get(
            prefix + "/{id}",
            tags=tags,
        )(view.retrieve)
    if 'POST' in methods:
        router.post(
            prefix,
            tags=tags,
        )(view.create)
    if 'PUT' in methods:
        router.put(
            prefix,
            tags=tags,
        )(view.update)
    if 'DELETE' in methods:
        router.delete(prefix + "/{id}", tags=tags)(view.delete)
    self.__router.include_router(router, prefix='/admin')
def health_status_router():
    """Build a router exposing GET /health_check.

    Returns:
        APIRouter with a single health-check endpoint attached.
    """
    async def health_check():
        """Report the service version and the current timestamp."""
        logging.info('/Health check')
        try:
            response = {'version': '0.0.1', 'generated_at': str(time.time())}
            return response
        except Exception as e:
            # status_code=500 causes the health check to fail, so degrade to a
            # 200 payload describing the error. Stringify the exception:
            # returning the exception object itself is not JSON-serialisable.
            return {'status': 'error', "message": str(e)}

    router = APIRouter()
    # router.post("/health_check", response_model=HealthStatusResponse)(health_check)
    router.get("/health_check", response_model=HealthStatusResponse)(health_check)
    return router
def meta_data_router(manager: ModelManager):
    """Build a router exposing GET /meta for model-metadata lookups."""
    async def available_model(model_name: str = None, version: str = None):
        # Without a model name, report metadata for every registered model.
        if model_name is None:
            result = manager.all_models_meta_data_response()
            logging.info(result)
            return result
        response = manager.model_meta_data_response(model_name, version)
        if response is False:
            # Unknown model/version: answer 404 and include the catalogue of
            # models that do exist so the caller can correct the request.
            payload = {
                'error_message': 'Model or version is not found',
                'available_models': manager.all_models_meta_data_response(),
            }
            return JSONResponse(status_code=404, content=payload)
        return response

    router = APIRouter()
    #router.post('/meta', response_model=ModelsMetaDataResponse, responses=model_not_found_response())(available_model)
    router.get('/meta',
               response_model=ModelsMetaDataResponse,
               responses=model_not_found_response())(available_model)
    return router
from src.rest.health.views import (HealthCheckView, HeathServicesView, HealthServicesDetail) from src.rest.health.schemas import health_check, list_services, service_detail from fastapi import APIRouter router = APIRouter() health = HealthCheckView() serv = HeathServicesView() details = HealthServicesDetail() router.get('', name='health_check', responses={ 200: { 'content': { 'application/json': { 'example': health_check } } } })(health.get) router.get('/services', name='health_services', responses={ 200: { 'content': { 'application/json': { 'example': list_services } }
return user_id async def user_retrieve(id, current_user: User = Depends( create_current_active_user(True))): """获取用户的详细信息""" query = select([ User, AccountBook.money, AccountBook.rate ]).where(User.id == id).where(AccountBook.user_id == User.id) print(query) return await AdminDatabase().database.fetch_one(query) router.get('/admin/user/{id}', tags=['user'], description="查看某个用户的详细信息", summary="查看详情")(user_retrieve) # router.get('/user/list', tags=['user'], response_model=UserListModel, summary="获取用户列表")(user_list) router.get('/admin/user', tags=['user'], response_model=UserListModel, summary="获取用户列表")(user_list) # router.get('/user/list/{id}', tags=['user'], response_model=UserListModel, summary="获取用户列表")(user_list) router.patch('/user/updateInfo', tags=['user'], response_model=ModifyBaseInfo, summary="更新个人数据")(modify_base_info) router.patch('/user/modifyPassword', tags=['user'], description="修改密码",
class Resource(MSONable):
    """
    Implements a REST-compatible Resource as a URL endpoint.

    This class provides a number of convenience features including full
    pagination, field projection, and the MAPI query language.

    - implements custom error handlers to provide MAPI Responses
    - implements a standard metadata response for the class
    - JSON Configuration
    """

    def __init__(
        self,
        store: Store,
        model: Union[BaseModel, str],
        tags: Optional[List[str]] = None,
        query_operators: Optional[List[QueryOperator]] = None,
        route_class: Type[APIRoute] = None,
        key_fields: List[str] = None,
        custom_endpoint_funcs: List[Callable] = None,
        enable_get_by_key: bool = True,
        enable_default_search: bool = True,
    ):
        """
        Args:
            store: The Maggma Store to get data from
            model: the pydantic model to apply to the documents from the Store
                This can be a string with a full python path to a model or an
                actual pydantic Model if this is being instantiated in python
                code. Serializing this via Monty will auto-convert the pydantic
                model into a python path string
            tags: list of tags for the Endpoint
            query_operators: operators for the query language
            route_class: Custom APIRoute class to define post-processing or
                custom validation of response data
            key_fields: List of fields to always project. Default uses
                SparseFieldsQuery to allow users to define these on-the-fly.
            custom_endpoint_funcs: Custom endpoint preparation functions to be used
            enable_get_by_key: Enable default key route for endpoint.
            enable_default_search: Enable default endpoint search behavior.
        """
        self.store = store
        self.tags = tags or []
        self.key_fields = key_fields
        # Set to True below if any VersionQuery operator is present.
        self.versioned = False
        self.cep = custom_endpoint_funcs
        self.enable_get_by_key = enable_get_by_key
        self.enable_default_search = enable_default_search

        if isinstance(model, str):
            # Dotted-path string: import the model class dynamically.
            module_path = ".".join(model.split(".")[:-1])
            class_name = model.split(".")[-1]
            class_model = dynamic_import(module_path, class_name)
            assert issubclass(
                class_model,
                BaseModel), "The resource model has to be a PyDantic Model"
            self.model = class_model
        elif isinstance(model, type) and issubclass(model, BaseModel):
            self.model = model
        else:
            raise ValueError("The resource model has to be a PyDantic Model")

        # Default operators: pagination plus sparse-field projection over the
        # store's key and last-updated field.
        self.query_operators = (
            query_operators if query_operators is not None else [
                PaginationQuery(),
                SparseFieldsQuery(
                    self.model,
                    default_fields=[
                        self.store.key, self.store.last_updated_field
                    ],
                ),
            ])

        if any(
                isinstance(qop_entry, VersionQuery)
                for qop_entry in self.query_operators):
            self.versioned = True

        if route_class is not None:
            self.router = APIRouter(route_class=route_class)
        else:
            self.router = APIRouter()
        self.response_model = Response[self.model]  # type: ignore
        self.prepare_endpoint()

    def prepare_endpoint(self):
        """
        Internal method to prepare the endpoint by setting up default handlers
        for routes.
        """
        # Custom endpoint functions run first so they can take precedence.
        if self.cep is not None:
            for func in self.cep:
                func(self)
        if self.enable_get_by_key:
            self.build_get_by_key()
        if self.enable_default_search:
            self.set_dynamic_model_search()

    def build_get_by_key(self):
        """Register the GET-by-primary-key route (versioned or not)."""
        key_name = self.store.key
        model_name = self.model.__name__

        if self.key_fields is None:
            # Let the user choose projected fields on-the-fly.
            field_input = SparseFieldsQuery(
                self.model,
                [self.store.key, self.store.last_updated_field]).query
        else:
            # Fixed projection: always return the configured key_fields.
            def field_input():
                return {"properties": self.key_fields}

        if not self.versioned:

            async def get_by_key(
                    key: str = Path(
                        ...,
                        alias=key_name,
                        title=f"The {key_name} of the {model_name} to get",
                    ),
                    fields: STORE_PARAMS = Depends(field_input),
            ):
                f"""
                Get's a document by the primary key in the store

                Args:
                    {key_name}: the id of a single {model_name}

                Returns:
                    a single {model_name} document
                """
                self.store.connect()
                crit = {self.store.key: key}
                # NOTE(review): hard-coded per-model sandbox criteria —
                # presumably restricts results to the "core" sandbox; confirm.
                if model_name == "MaterialsCoreDoc":
                    crit.update({"_sbxn": "core"})
                elif model_name == "TaskDoc":
                    crit.update({"sbxn": "core"})
                elif model_name == "ThermoDoc":
                    crit.update({"_sbxn": "core"})
                item = self.store.query_one(criteria=crit,
                                            properties=fields["properties"])
                if item is None:
                    raise HTTPException(
                        status_code=404,
                        detail=f"Item with {self.store.key} = {key} not found",
                    )
                response = {"data": [item]}
                return response

            self.router.get(
                f"/{{{key_name}}}/",
                response_description=f"Get an {model_name} by {key_name}",
                response_model=self.response_model,
                response_model_exclude_unset=True,
                tags=self.tags,
            )(get_by_key)
        else:

            async def get_by_key_versioned(
                    key: str = Path(
                        ...,
                        alias=key_name,
                        title=f"The {key_name} of the {model_name} to get",
                    ),
                    fields: STORE_PARAMS = Depends(field_input),
                    version: str = Query(
                        None,
                        description=
                        "Database version to query on formatted as YYYY.MM.DD",
                    ),
            ):
                f"""
                Get's a document by the primary key in the store

                Args:
                    {key_name}: the id of a single {model_name}

                Returns:
                    a single {model_name} document
                """
                # Versions are stored with underscores (YYYY_MM_DD) in the
                # collection name; fall back to the DB_VERSION env var.
                if version is not None:
                    version = version.replace(".", "_")
                else:
                    version = os.environ.get("DB_VERSION")
                # Point the store at the versioned collection and reconnect.
                prefix = self.store.collection_name.split("_")[0]
                self.store.collection_name = f"{prefix}_{version}"
                self.store.connect(force_reset=True)
                crit = {self.store.key: key}
                # Same sandbox restriction as the unversioned route above.
                if model_name == "MaterialsCoreDoc":
                    crit.update({"_sbxn": "core"})
                elif model_name == "TaskDoc":
                    crit.update({"sbxn": "core"})
                elif model_name == "ThermoDoc":
                    crit.update({"_sbxn": "core"})
                item = self.store.query_one(criteria=crit,
                                            properties=fields["properties"])
                if item is None:
                    raise HTTPException(
                        status_code=404,
                        detail=f"Item with {self.store.key} = {key} not found",
                    )
                response = {"data": [item]}
                return response

            self.router.get(
                f"/{{{key_name}}}/",
                response_description=f"Get an {model_name} by {key_name}",
                response_model=self.response_model,
                response_model_exclude_unset=True,
                tags=self.tags,
            )(get_by_key_versioned)

    def set_dynamic_model_search(self):
        """Register the default '/' search route built from the query operators."""
        model_name = self.model.__name__

        async def search(**queries: STORE_PARAMS):
            # The true signature is attached below via attach_signature; each
            # query operator contributes one STORE_PARAMS dependency plus the
            # raw Request.
            request: Request = queries.pop("request")  # type: ignore
            query: STORE_PARAMS = merge_queries(list(queries.values()))

            # Reject any query parameter not declared by some operator.
            query_params = [
                entry for _, i in enumerate(self.query_operators)
                for entry in signature(i.query).parameters
            ]
            overlap = [
                key for key in request.query_params.keys()
                if key not in query_params
            ]
            if any(overlap):
                raise HTTPException(
                    status_code=404,
                    detail=
                    "Request contains query parameters which cannot be used: {}"
                    .format(", ".join(overlap)),
                )

            if self.versioned:
                # Route the query to the versioned collection (see
                # get_by_key_versioned for the naming convention).
                if query["criteria"].get("version", None) is not None:
                    version = query["criteria"]["version"].replace(".", "_")
                    query["criteria"].pop("version")
                else:
                    version = os.environ.get("DB_VERSION")
                prefix = self.store.collection_name.split("_")[0]
                self.store.collection_name = f"{prefix}_{version}"
                self.store.connect(force_reset=True)

            # Per-model sandbox restriction (same as in build_get_by_key).
            if model_name == "MaterialsCoreDoc":
                query["criteria"].update({"_sbxn": "core"})
            elif model_name == "TaskDoc":
                query["criteria"].update({"sbxn": "core"})
            elif model_name == "ThermoDoc":
                query["criteria"].update({"_sbxn": "core"})

            data = list(self.store.query(**query))  # type: ignore
            # Merge the metadata contributed by every operator.
            operator_metas = [
                operator.meta(self.store, query.get("criteria", {}))
                for operator in self.query_operators
            ]
            meta = {k: v for m in operator_metas for k, v in m.items()}
            response = {"data": data, "meta": meta}
            return response

        # Build the dynamic signature: one dependency per query operator,
        # plus the Request itself, so FastAPI documents/validates correctly.
        ann = {
            f"dep{i}": STORE_PARAMS
            for i, _ in enumerate(self.query_operators)
        }
        ann.update({"request": Request})
        attach_signature(
            search,
            annotations=ann,
            defaults={
                f"dep{i}": Depends(dep.query)
                for i, dep in enumerate(self.query_operators)
            },
        )

        self.router.get(
            "/",
            tags=self.tags,
            summary=f"Get {model_name} documents",
            response_model=self.response_model,
            response_description=f"Search for a {model_name}",
            response_model_exclude_unset=True,
        )(search)

        @self.router.get("", include_in_schema=False)
        def redirect_unslashes():
            """
            Redirects unforward slashed url to resource url with the forward slash
            """
            url = self.router.url_path_for("/")
            return RedirectResponse(url=url, status_code=301)

    def run(self):  # pragma: no cover
        """
        Runs the Endpoint cluster locally.
        This is intended for testing, not production.
        """
        import uvicorn
        app = FastAPI()
        app.include_router(self.router, prefix="")
        uvicorn.run(app)

    def as_dict(self) -> Dict:
        """
        Special as_dict implemented to convert pydantic models into strings.
        """
        d = super().as_dict()  # Ensures sub-classes serialize correctly
        d["model"] = f"{self.model.__module__}.{self.model.__name__}"
        return d
from typing import List from apps.users.entities.user import UserEntity from apps.users.views.user import UserView from fastapi import APIRouter router = APIRouter() user_view = UserView() router.get( "/", name="list", summary="List users", response_model=List[UserEntity], )(user_view.list) router.get( "/{user_id}", name="get", summary="Get user by id", response_model=UserEntity, )(user_view.get) router.post( "/", name="create", summary="Create user", response_model=UserEntity, )(user_view.create) router.delete(
class Resource(MSONable):
    """
    Implements a REST-compatible Resource as a URL endpoint.

    This class provides a number of convenience features including full
    pagination, field projection, and the MAPI query language.
    """

    def __init__(
        self,
        store: Store,
        model: Union[BaseModel, str],
        tags: Optional[List[str]] = None,
        query_operators: Optional[List[QueryOperator]] = None,
        description: str = None,
    ):
        """
        Args:
            store: The Maggma Store to get data from
            model: the pydantic model to apply to the documents from the Store
                This can be a string with a full python path to a model or an
                actual pydantic Model if this is being instantiated in python
                code. Serializing this via Monty will auto-convert the pydantic
                model into a python path string
            tags: list of tags for the Endpoint
            query_operators: operators for the query language
            description: an explanation of what this resource does
        """
        self.store = store
        self.tags = tags or []
        self.description = description
        # Placeholder instance; replaced immediately below.
        self.model: BaseModel = BaseModel()
        if isinstance(model, str):
            # Dotted-path string: import the model class dynamically.
            module_path = ".".join(model.split(".")[:-1])
            class_name = model.split(".")[-1]
            self.model = dynamic_import(module_path, class_name)
        else:
            self.model = model

        # Default operators: pagination, sparse-field projection on the store
        # key, and a dynamic query derived from the model's fields.
        self.query_operators = (
            query_operators
            if query_operators is not None
            else [
                PaginationQuery(),
                SparseFieldsQuery(self.model, default_fields=[self.store.key]),
                DefaultDynamicQuery(self.model),
            ]
        )

        self.response_model = Response[self.model]  # type: ignore
        self.router = APIRouter()
        self.prepare_endpoint()

    def prepare_endpoint(self):
        """
        Internal method to prepare the endpoint by setting up default handlers
        for routes.
        """
        self.build_get_by_key()
        self.set_dynamic_model_search()

    def build_get_by_key(self):
        """Register the GET-by-primary-key route."""
        key_name = self.store.key
        model_name = self.model.__name__

        async def get_by_key(
            key: str = Path(
                ..., alias=key_name,
                title=f"The {key_name} of the {model_name} to get"
            ),
            fields: STORE_PARAMS = Depends(
                SparseFieldsQuery(self.model, [self.store.key]).query
            ),
        ):
            f"""
            Get's a document by the primary key in the store

            Args:
                {key_name}: the id of a single {model_name}

            Returns:
                a single {model_name} document
            """
            self.store.connect()
            item = self.store.query_one(
                criteria={self.store.key: key}, properties=fields["properties"]
            )
            if item is None:
                raise HTTPException(
                    status_code=404,
                    detail=f"Item with {self.store.key} = {key} not found",
                )
            response = {"data": [item]}  # , "meta": Meta()}
            return response

        self.router.get(
            f"/{{{key_name}}}/",
            response_description=f"Get an {model_name} by {key_name}",
            response_model=self.response_model,
            response_model_exclude_unset=True,
            tags=self.tags,
        )(get_by_key)

    def set_dynamic_model_search(self):
        """Register the default '/' search route built from the query operators."""
        model_name = self.model.__name__

        async def search(**queries: STORE_PARAMS):
            # The true signature is attached below via attach_signature; each
            # query operator contributes one STORE_PARAMS dependency.
            self.store.connect()
            query: Dict[Any, Any] = merge_queries(list(queries.values()))
            # Count with the criteria only, then fetch with the full query
            # (which may include projection/pagination).
            count_query = query["criteria"]
            count = self.store.count(count_query)
            data = list(self.store.query(**query))
            meta = Meta(total=count)
            response = {"data": data, "meta": meta.dict()}
            return response

        attach_signature(
            search,
            annotations={
                f"dep{i}": STORE_PARAMS
                for i, _ in enumerate(self.query_operators)
            },
            defaults={
                f"dep{i}": Depends(dep.query)
                for i, dep in enumerate(self.query_operators)
            },
        )

        self.router.get(
            "/",
            tags=self.tags,
            summary=f"Get {model_name} documents",
            response_model=self.response_model,
            response_description=f"Search for a {model_name}",
            response_model_exclude_unset=True,
        )(search)

    def run(self):  # pragma: no cover
        """
        Runs the Endpoint cluster locally.
        This is intended for testing, not production.
        """
        import uvicorn
        app = FastAPI()
        app.include_router(self.router, prefix="")
        uvicorn.run(app)

    def as_dict(self) -> Dict:
        """
        Special as_dict implemented to convert pydantic models into strings.
        """
        d = super().as_dict()  # Ensures sub-classes serialize correctly
        d["model"] = f"{self.model.__module__}.{self.model.__name__}"  # type: ignore
        return d
#!-*-coding:utf-8-*- from src.rest.file_models.views import FileModelsList, FileModelsDownload from fastapi import APIRouter from src.rest.file_models.schemas import list_file_models from starlette.responses import FileResponse router = APIRouter() lv = FileModelsList() dv = FileModelsDownload() router.get( '', name='file_models_list', responses={200: {'content': {'application/json': {'example': list_file_models}}}} )(lv.get) router.get( '/{fname}', name='file_models_download', response_class=FileResponse, responses={200: {'content': {'application/octet-stream': {'example': 'bytes'}}}} )(dv.get)
# _*_ coding:utf-8 _*_ # @Time:2021/2/23 15:32 # @Author:Cassie·Lau # @File urls.py """App v2 Router""" from fastapi import APIRouter from apps.controller.v2 import ( devops, ) router = APIRouter() #[Paas] router.get(path="/directory", tags=["server dir"])(devops.is_dir)
"""Router wiring for device data: logging, querying and registration."""
from fastapi import APIRouter

from . import log_device_data, query_device_data, register_device
from typing import Dict, List

router = APIRouter()

# POST /log_device_data — record incoming device data.
router.post("/log_device_data")(log_device_data.fun)
# GET /query_device_data/{device_id} — return logged data keyed by channel name.
router.get("/query_device_data/{device_id}",
           response_model=Dict[str, List[query_device_data.ChannelData]])(
               query_device_data.fun)
# POST /register_device — register a new device.
router.post("/register_device")(register_device.fun)
#!-*-coding:utf-8-*- from fastapi import APIRouter from src.rest.tasks.views import TasksSaveLayer from src.rest.tasks.schemas import list_tasks router = APIRouter() view = TasksSaveLayer() router.get('', name='list_all_tasks_save_layer', responses={ 200: { 'content': { 'application/json': { 'example': list_tasks } } } })(view.list_all)
def build_app(
    tree,
    authentication=None,
    server_settings=None,
    query_registry=None,
    serialization_registry=None,
    compression_registry=None,
):
    """
    Serve a Tree

    Parameters
    ----------
    tree : Tree
    authentication: dict, optional
        Dict of authentication configuration.
    authenticators: list, optional
        List of authenticator classes (one per support identity provider)
    server_settings: dict, optional
        Dict of other server configuration.
    query_registry: optional
        Registry of known search query types; defaults to the global registry.
    serialization_registry: optional
        Registry of serializers; used only for dependency overrides below.
    compression_registry: optional
        Registry of response compressors; defaults to the global registry.
    """
    authentication = authentication or {}
    # Map provider name -> authenticator instance for quick lookup.
    authenticators = {
        spec["provider"]: spec["authenticator"]
        for spec in authentication.get("providers", [])
    }
    server_settings = server_settings or {}
    query_registry = query_registry or get_query_registry()
    compression_registry = compression_registry or default_compression_registry

    app = FastAPI()

    if SHARE_TILED_PATH:
        # If the distribution includes static assets, serve UI routes.

        @app.get("/ui/{path:path}")
        async def ui(path):
            response = await lookup_file(path)
            return response

        async def lookup_file(path, try_app=True):
            # Resolve a UI asset path under SHARE_TILED_PATH/ui; empty path
            # means the SPA entry point.
            if not path:
                path = "index.html"
            full_path = Path(SHARE_TILED_PATH, "ui", path)
            try:
                # stat in a worker thread to avoid blocking the event loop.
                stat_result = await anyio.to_thread.run_sync(
                    os.stat, full_path)
            except PermissionError:
                raise HTTPException(status_code=401)
            except FileNotFoundError:
                # This may be a URL that has meaning to the client-side application,
                # such as /ui/node/metadata/a/b/c.
                # Serve index.html and let the client-side application sort it out.
                if try_app:
                    response = await lookup_file("index.html", try_app=False)
                    return response
                raise HTTPException(status_code=404)
            except OSError:
                raise
            return FileResponse(
                full_path,
                stat_result=stat_result,
                method="GET",
                status_code=200,
            )

        app.mount(
            "/static",
            StaticFiles(directory=Path(SHARE_TILED_PATH, "static")),
            name="ui",
        )
        templates = Jinja2Templates(Path(SHARE_TILED_PATH, "templates"))

        @app.get("/", response_class=HTMLResponse)
        async def index(
            request: Request,
            # This dependency is here because it runs the code that moves
            # API key from the query parameter to a cookie (if it is valid).
            principal=Security(get_current_principal, scopes=[]),
        ):
            if request.headers.get("user-agent", "").startswith("python-tiled"):
                # This results in an error message like
                # ClientError: 400: To connect from a Python client, use
                # http://localhost:8000/api not http://localhost:8000/?root_path=true
                raise HTTPException(
                    status_code=400,
                    detail=
                    f"To connect from a Python client, use {get_base_url(request)} not",
                )
            return templates.TemplateResponse(
                "index.html",
                {
                    "request": request,
                    # This is used to fill in the Python code sample with the API URL.
                    "api_url": get_base_url(request),
                    # This is used to construct the link to the React UI.
                    "root_url": get_root_url(request),
                    # If defined, this adds a Binder link to the page.
                    "binder_link": os.getenv("TILED_BINDER_LINK"),
                },
            )

    app.state.allow_origins = []
    app.include_router(router, prefix="/api")

    # The Tree and Authenticator have the opportunity to add custom routes to
    # the server here. (Just for example, a Tree of BlueskyRuns uses this
    # hook to add a /documents route.) This has to be done before dependency_overrides
    # are processed, so we cannot just inject this configuration via Depends.
    for custom_router in getattr(tree, "include_routers", []):
        app.include_router(custom_router, prefix="/api")

    if authentication.get("providers", []):
        # Delay this imports to avoid delaying startup with the SQL and cryptography
        # imports if they are not needed.
        from .authentication import (
            base_authentication_router,
            build_auth_code_route,
            build_handle_credentials_route,
            oauth2_scheme,
        )

        # For the OpenAPI schema, inject a OAuth2PasswordBearer URL.
        first_provider = authentication["providers"][0]["provider"]
        oauth2_scheme.model.flows.password.tokenUrl = (
            f"/api/auth/provider/{first_provider}/token")
        # Authenticators provide Router(s) for their particular flow.
        # Collect them in the authentication_router.
        authentication_router = APIRouter()
        # This adds the universal routes like /session/refresh and /session/revoke.
        # Below we will add routes specific to our authentication providers.
        authentication_router.include_router(base_authentication_router)
        for spec in authentication["providers"]:
            provider = spec["provider"]
            authenticator = spec["authenticator"]
            mode = authenticator.mode
            if mode == Mode.password:
                authentication_router.post(f"/provider/{provider}/token")(
                    build_handle_credentials_route(authenticator, provider))
            elif mode == Mode.external:
                # External (OAuth-style) flows use a GET and a POST /code route.
                authentication_router.get(f"/provider/{provider}/code")(
                    build_auth_code_route(authenticator, provider))
                authentication_router.post(f"/provider/{provider}/code")(
                    build_auth_code_route(authenticator, provider))
            else:
                raise ValueError(f"unknown authentication mode {mode}")
            for custom_router in getattr(authenticator, "include_routers", []):
                authentication_router.include_router(
                    custom_router, prefix=f"/provider/{provider}")
        # And add this authentication_router itself to the app.
        app.include_router(authentication_router, prefix="/api/auth")

    # Dependency-override factories; lru_cache(1) makes each a lazy singleton.
    @lru_cache(1)
    def override_get_authenticators():
        return authenticators

    @lru_cache(1)
    def override_get_root_tree():
        return tree

    @lru_cache(1)
    def override_get_settings():
        # Start from defaults, then layer authentication- and server-level
        # overrides on top.
        settings = get_settings()
        for item in [
                "allow_anonymous_access",
                "secret_keys",
                "single_user_api_key",
                "access_token_max_age",
                "refresh_token_max_age",
                "session_max_age",
        ]:
            if authentication.get(item) is not None:
                setattr(settings, item, authentication[item])
        for item in ["allow_origins", "response_bytesize_limit"]:
            if server_settings.get(item) is not None:
                setattr(settings, item, server_settings[item])
        database = server_settings.get("database", {})
        if database.get("uri"):
            settings.database_uri = database["uri"]
        if database.get("pool_size"):
            settings.database_pool_size = database["pool_size"]
        if database.get("pool_pre_ping"):
            settings.database_pool_pre_ping = database["pool_pre_ping"]
        object_cache_available_bytes = server_settings.get(
            "object_cache", {}).get("available_bytes")
        if object_cache_available_bytes is not None:
            setattr(settings, "object_cache_available_bytes",
                    object_cache_available_bytes)
        if authentication.get("providers"):
            # If we support authentication providers, we need a database, so if one is
            # not set, use a SQLite database in the current working directory.
            settings.database_uri = settings.database_uri or "sqlite:///./tiled.sqlite"
        return settings

    @app.on_event("startup")
    async def startup_event():
        # Validate the single-user API key.
        settings = app.dependency_overrides[get_settings]()
        single_user_api_key = settings.single_user_api_key
        if single_user_api_key is not None:
            if not single_user_api_key.isalnum():
                raise ValueError("""
    The API key must only contain alphanumeric characters. We enforce this because
    pasting other characters into a URL, as in ?api_key=..., can result in
    confusing behavior due to ambiguous encodings.

    The API key can be as long as you like. Here are two ways to generate a valid
    one:

    # With openssl:
    openssl rand -hex 32

    # With Python:
    python -c "import secrets; print(secrets.token_hex(32))"
    """)
        # Stash these to cancel this on shutdown.
        app.state.tasks = []
        # Trees and Authenticators can run tasks in the background.
        background_tasks = []
        background_tasks.extend(getattr(tree, "background_tasks", []))
        for authenticator in authenticators:
            background_tasks.extend(
                getattr(authenticator, "background_tasks", []))
        for task in background_tasks or []:
            asyncio_task = asyncio.create_task(task())
            app.state.tasks.append(asyncio_task)
        # The /search route is defined at server startup so that the user has the
        # opporunity to register custom query types before startup.
        app.include_router(declare_search_router(query_registry), prefix="/api")

        app.state.allow_origins.extend(settings.allow_origins)
        app.add_middleware(
            CORSMiddleware,
            allow_origins=app.state.allow_origins,
            allow_credentials=True,
            allow_methods=["*"],
            allow_headers=["*"],
        )

        object_cache_logger.setLevel(settings.object_cache_log_level.upper())
        object_cache_available_bytes = settings.object_cache_available_bytes
        import psutil
        TOTAL_PHYSICAL_MEMORY = psutil.virtual_memory().total
        if object_cache_available_bytes < 0:
            raise ValueError(
                "Negative object cache size is not interpretable.")
        if object_cache_available_bytes == 0:
            cache = NO_CACHE
            object_cache_logger.info("disabled")
        else:
            if 0 < object_cache_available_bytes < 1:
                # Interpret this as a fraction of system memory.
                object_cache_available_bytes = int(
                    TOTAL_PHYSICAL_MEMORY * object_cache_available_bytes)
            else:
                object_cache_available_bytes = int(
                    object_cache_available_bytes)
            cache = ObjectCache(object_cache_available_bytes)
            percentage = round(object_cache_available_bytes /
                               TOTAL_PHYSICAL_MEMORY * 100)
            object_cache_logger.info(
                f"Will use up to {object_cache_available_bytes:_} bytes ({percentage:d}% of total physical RAM)"
            )
        set_object_cache(cache)

        # Expose the root_tree here to make it easier to access it from tests,
        # in usages like:
        # client.context.app.state.root_tree
        app.state.root_tree = app.dependency_overrides[get_root_tree]()

        if settings.database_uri is not None:
            from sqlalchemy import create_engine
            from sqlalchemy.orm import sessionmaker

            from ..database import orm
            from ..database.core import (
                REQUIRED_REVISION,
                DatabaseUpgradeNeeded,
                UninitializedDatabase,
                check_database,
                initialize_database,
                make_admin_by_identity,
            )

            connect_args = {}
            if settings.database_uri.startswith("sqlite"):
                # SQLite connections are bound to one thread by default.
                connect_args.update({"check_same_thread": False})

            engine = create_engine(settings.database_uri,
                                   connect_args=connect_args)
            redacted_url = engine.url._replace(password="******")
            try:
                check_database(engine)
            except UninitializedDatabase:
                # Create tables and stamp (alembic) revision.
                logger.info(
                    f"Database {redacted_url} is new. Creating tables and marking revision {REQUIRED_REVISION}."
                )
                initialize_database(engine)
                logger.info("Database initialized.")
            except DatabaseUpgradeNeeded as err:
                print(
                    f"""

The database used by Tiled to store authentication-related information
was created using an older version of Tiled. It needs to be upgraded to
work with this version of Tiled.

Back up the database, and then run:

    tiled admin upgrade-database {redacted_url}
""",
                    file=sys.stderr,
                )
                raise err from None
            else:
                logger.info(
                    f"Connected to existing database at {redacted_url}.")
            SessionLocal = sessionmaker(autocommit=False,
                                        autoflush=False,
                                        bind=engine)
            db = SessionLocal()
            for admin in authentication.get("tiled_admins", []):
                logger.info(
                    f"Ensuring that principal with identity {admin} has role 'admin'"
                )
                make_admin_by_identity(
                    db,
                    identity_provider=admin["provider"],
                    id=admin["id"],
                )

            async def purge_expired_sessions_and_api_keys():
                logger.info(
                    "Purging expired Sessions and API keys from the database.")
                while True:
                    # NOTE(review): purge_expired(engine, ...) is invoked
                    # immediately and its RESULT is passed to run_in_executor
                    # as the callable — presumably purge_expired returns a
                    # callable; verify, otherwise this should be wrapped in
                    # a partial/lambda.
                    await asyncio.get_running_loop().run_in_executor(
                        None, purge_expired(engine, orm.Session))
                    await asyncio.get_running_loop().run_in_executor(
                        None, purge_expired(engine, orm.APIKey))
                    await asyncio.sleep(600)

            app.state.tasks.append(
                asyncio.create_task(purge_expired_sessions_and_api_keys()))

    @app.on_event("shutdown")
    async def shutdown_event():
        # Cancel any background tasks started in startup_event.
        for task in app.state.tasks:
            task.cancel()

    app.add_middleware(
        CompressionMiddleware,
        compression_registry=compression_registry,
        minimum_size=1000,
    )

    @app.middleware("http")
    async def capture_metrics(request: Request, call_next):
        """
        Place metrics in Server-Timing header, in accordance with HTTP spec.
        """
        # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Server-Timing
        # https://w3c.github.io/server-timing/#the-server-timing-header-field
        # This information seems safe to share because the user can easily
        # estimate it based on request/response time, but if we add more detailed
        # information here we should keep in mind security concerns and perhaps
        # only include this for certain users.
        # Initialize a dict that routes and dependencies can stash metrics in.
        metrics = collections.defaultdict(
            lambda: collections.defaultdict(lambda: 0))
        request.state.metrics = metrics
        # Record the overall application time.
        with record_timing(metrics, "app"):
            response = await call_next(request)
        # Server-Timing specifies times should be in milliseconds.
        # Prometheus specifies times should be in seconds.
        # Therefore, we store as seconds and convert to ms for Server-Timing here.
        # That is what the factor of 1000 below is doing.
        response.headers["Server-Timing"] = ", ".join(
            f"{key};" +
            ";".join((f"{metric}={value * 1000:.1f}" if metric ==
                      "dur" else f"{metric}={value:.1f}")
                     for metric, value in metrics_.items())
            for key, metrics_ in metrics.items())
        response.__class__ = PatchedStreamingResponse  # tolerate memoryview
        return response

    @app.middleware("http")
    async def double_submit_cookie_csrf_protection(request: Request,
                                                   call_next):
        # https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#double-submit-cookie
        csrf_cookie = request.cookies.get(CSRF_COOKIE_NAME)
        # Enforce CSRF only for unsafe methods that carry sensitive cookies.
        if (request.method not in SAFE_METHODS) and set(
                request.cookies).intersection(SENSITIVE_COOKIES):
            if not csrf_cookie:
                return Response(status_code=403,
                                content="Expected tiled_csrf_token cookie")
            # Get the token from the Header or (if not there) the query parameter.
            csrf_token = request.headers.get(CSRF_HEADER_NAME)
            if csrf_token is None:
                parsed_query = urllib.parse.parse_qs(request.url.query)
                csrf_token = parsed_query.get(CSRF_QUERY_PARAMETER)
            if not csrf_token:
                return Response(
                    status_code=403,
                    content=
                    f"Expected {CSRF_QUERY_PARAMETER} query parameter or {CSRF_HEADER_NAME} header",
                )
            # Securely compare the token with the cookie.
            if not secrets.compare_digest(csrf_token, csrf_cookie):
                return Response(
                    status_code=403,
                    content="Double-submit CSRF tokens do not match")
        response = await call_next(request)
        response.__class__ = PatchedStreamingResponse  # tolerate memoryview
        if not csrf_cookie:
            # First response to this client: issue the CSRF cookie.
            response.set_cookie(
                key=CSRF_COOKIE_NAME,
                value=secrets.token_urlsafe(32),
                httponly=True,
                samesite="lax",
            )
        return response

    @app.middleware("http")
    async def set_cookies(request: Request, call_next):
        "This enables dependencies to inject cookies that they want to be set."
        # Create some Request state, to be (possibly) populated by dependencies.
        request.state.cookies_to_set = []
        response = await call_next(request)
        response.__class__ = PatchedStreamingResponse  # tolerate memoryview
        for params in request.state.cookies_to_set:
            params.setdefault("httponly", True)
            params.setdefault("samesite", "lax")
            response.set_cookie(**params)
        return response

    app.openapi = partial(custom_openapi, app)
    app.dependency_overrides[get_authenticators] = override_get_authenticators
    app.dependency_overrides[get_root_tree] = override_get_root_tree
    app.dependency_overrides[get_settings] = override_get_settings
    if query_registry is not None:

        @lru_cache(1)
        def override_get_query_registry():
            return query_registry

        app.dependency_overrides[
            get_query_registry] = override_get_query_registry
    if serialization_registry is not None:

        @lru_cache(1)
        def override_get_serialization_registry():
            return serialization_registry

        app.dependency_overrides[
            get_serialization_registry] = override_get_serialization_registry

    metrics_config = server_settings.get("metrics", {})
    if metrics_config.get("prometheus", False):
        from . import metrics

        app.include_router(metrics.router, prefix="/api")

        @app.middleware("http")
        async def capture_metrics_prometheus(request: Request, call_next):
            response = await call_next(request)
            metrics.capture_request_metrics(request, response)
            return response

    return app
from functools import partial from typing import Optional, TypeVar, Callable from urllib.parse import quote from fastapi import APIRouter, Response, Request from fastapi.exceptions import HTTPException from pydantic import BaseModel from ..data import * from ..mason import MasonBase, Control, Namespace entry = APIRouter() entry.get = partial(entry.get, response_model_exclude_defaults=True, response_model_exclude_none=True) """ Replace this with a 'better' version that will quote absolutely everything. ASGI spec can't handle / characters and maybe some other ones, so we have to take them out. Similarly all incoming new models have to be sanitized. """ quote = partial(quote, safe="") def path(request: Request, func: str, **kwargs) -> str: """ Resolve path from request Until router can resolve full paths, this will stay here :param request: Request :param func: Path function to resolve from
def declare_search_router(query_registry) -> APIRouter:
    """
    Build the ``/node/search`` router dynamically at startup.

    The set of allowed HTTP query parameters depends on the user-configurable
    registry of known search query types, so the route signature cannot be
    written statically: it is assembled at runtime by rewriting the signature
    of ``node_search`` below.

    :param query_registry: registry exposing ``name_to_query_type``, a mapping
        of query name -> dataclass describing that query's fields.
    :return: an ``APIRouter`` with the search route mounted twice, with and
        without a trailing ``{path}`` segment.
    """

    async def node_search(
        request: Request,
        path: str,
        fields: Optional[List[schemas.EntryFields]] = Query(
            list(schemas.EntryFields)),
        select_metadata: Optional[str] = Query(None),
        offset: Optional[int] = Query(0, alias="page[offset]"),
        limit: Optional[int] = Query(DEFAULT_PAGE_SIZE, alias="page[limit]"),
        sort: Optional[str] = Query(None),
        omit_links: bool = Query(False),
        entry: Any = Security(entry, scopes=["read:metadata"]),
        query_registry=Depends(get_query_registry),
        # **filters receives the per-query-type parameters injected into the
        # signature below; FastAPI passes them through by name.
        **filters,
    ):
        """Search the tree under *path* and return a paginated response."""
        # Label the request so downstream consumers can identify the endpoint.
        request.state.endpoint = "search"
        try:
            resource, metadata_stale_at, must_revalidate = construct_entries_response(
                query_registry,
                entry,
                "/node/search",
                path,
                offset,
                limit,
                fields,
                select_metadata,
                omit_links,
                filters,
                sort,
                get_base_url(request),
                resolve_media_type(request),
            )
            # We only get one Expires header, so if different parts of this
            # response become stale at different times, we cite the earliest
            # one.
            entries_stale_at = getattr(entry, "entries_stale_at", None)
            headers = {}
            if (metadata_stale_at is None) or (entries_stale_at is None):
                expires = None
            else:
                expires = min(metadata_stale_at, entries_stale_at)
            if must_revalidate:
                headers["Cache-Control"] = "must-revalidate"
            return json_or_msgpack(
                request,
                resource.dict(),
                expires=expires,
                headers=headers,
            )
        except NoEntry:
            raise HTTPException(status_code=404, detail="No such entry.")
        except WrongTypeForRoute as err:
            raise HTTPException(status_code=404, detail=err.args[0])
        except JMESPathError as err:
            raise HTTPException(
                status_code=400,
                detail=
                f"Malformed 'select_metadata' parameter raised JMESPathError: {err}",
            )

    # Black magic here! FastAPI bases its validation and auto-generated swagger
    # documentation on the signature of the route function. We do not know what
    # that signature should be at compile-time. We only know it once we have a
    # chance to check the user-configurable registry of query types. Therefore,
    # we modify the signature here, at runtime, just before handing it to
    # FastAPI in the usual way.
    # When FastAPI calls the function with these added parameters, they will be
    # accepted via **filters.
    # Make a copy of the original parameters.
    signature = inspect.signature(node_search)
    parameters = list(signature.parameters.values())
    # Drop the **filters parameter from the signature; the concrete injected
    # parameters below take its place.
    del parameters[-1]
    # Add a parameter for each field in each type of query.
    for name, query in query_registry.name_to_query_type.items():
        for field in dataclasses.fields(query):
            # The structured "alias" here is based on
            # https://mglaman.dev/blog/using-json-router-query-your-search-router-indexes
            if getattr(field.type, "__origin__", None) is list:
                # List-typed fields are accepted as plain strings here —
                # presumably parsed further downstream; TODO confirm.
                field_type = str
            else:
                field_type = field.type
            injected_parameter = inspect.Parameter(
                name=f"filter___{name}___{field.name}",
                kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,
                default=Query(
                    None, alias=f"filter[{name}][condition][{field.name}]"),
                annotation=Optional[List[field_type]],
            )
            parameters.append(injected_parameter)
    node_search.__signature__ = signature.replace(parameters=parameters)
    # End black magic

    # Register the search route. The bare (root) variant is hidden from the
    # schema; only the "{path}" variant is documented.
    router = APIRouter()
    router.get(
        "/node/search",
        response_model=schemas.Response[
            List[schemas.Resource[schemas.NodeAttributes, dict, dict]],
            schemas.PaginationLinks,
            dict,
        ],
        include_in_schema=False,
    )(node_search)
    router.get(
        "/node/search/{path:path}",
        response_model=schemas.Response[
            List[schemas.Resource[schemas.NodeAttributes, dict, dict]],
            schemas.PaginationLinks,
            dict,
        ],
    )(node_search)
    return router
# Routes for the layer endpoints, delegated to a single LayerView instance.
# Registration order is kept as-is so overlapping paths resolve identically.
router = APIRouter()
lv = LayerView()

# Upload a new layer; a successful upload returns an empty 204 response.
router.post(
    '/upload',
    name='upload_layer',
    status_code=status.HTTP_204_NO_CONTENT,
)(lv.post)

# OpenAPI documentation for the binary Mapbox vector-tile payload.
_mvt_tile_responses = {
    200: {
        "content": {
            "application/vnd.mapbox-vector-tile": {
                "example": "bytes"
            }
        },
        "description": "Retorna um tile .pbf.",
    }
}

# Stream a single vector tile addressed by layer, zoom, x, y and format.
router.get(
    '/{layer_id}/mvt/{z}/{x}/{y}.{fmt}',
    name='get_mvt_layer',
    response_class=StreamingResponse,
    responses=_mvt_tile_responses,
)(lv.get_mvt)

# Example payload documenting the layer-listing endpoint.
_layer_list_responses = {
    200: {'content': {'application/json': {"example": list_layers}}}
}

# List all available layers.
router.get(
    '',
    name='get_layers',
    responses=_layer_list_responses,
)(lv.list_all)
def create_table_crud(table, app, prefix: str = "/db", api_key: str = None):
    """
    Mount auto-generated CRUD routes for a Pony ORM entity onto a FastAPI app.

    GET and POST are mounted at ``{prefix}/{table.__name__}``; DELETE and PUT
    additionally take one path segment per primary-key attribute.

    :param table: Pony ORM entity class (relies on its ``_pk_attrs_``,
        ``_columns_`` and ``_adict_`` internals).
    :param app: FastAPI application to mount the generated router on.
    :param prefix: URL prefix for all generated routes.
    :param api_key: if given, it is exported via the ``API_CRUD_KEY``
        environment variable and every generated route requires a matching
        key through the ``get_api_key`` dependency.
    """
    if api_key:
        # get_api_key validates incoming keys against this variable.
        os.environ['API_CRUD_KEY'] = api_key
    router = APIRouter()

    pkey_attr_names = [pkey.name for pkey in table._pk_attrs_]
    # One path placeholder per primary-key attribute, e.g. "{id}".
    path_args = [f'{{{pk}}}' for pk in pkey_attr_names]

    security_parameter = Parameter(
        '_api_key',
        kind=Parameter.POSITIONAL_OR_KEYWORD,
        annotation=APIKey,
        default=Depends(get_api_key),
    )

    # Build the parameters FastAPI should expose. JSON columns are not
    # queryable; any type FastAPI cannot parse from a query string falls
    # back to str.
    get_query_params = []
    api_get_params = []  # primary-key parameters only (used as path params)
    for col in table._columns_:
        arg_type = table._adict_.get(col).py_type
        if arg_type not in [dict, ormtypes.Json]:
            if arg_type not in [int, bool, float, UUID, datetime]:
                arg_type = str
            get_query_params.append(
                Parameter(col, kind=Parameter.POSITIONAL_OR_KEYWORD,
                          annotation=arg_type, default=None))
            if col in pkey_attr_names:
                api_get_params.append(
                    Parameter(col, kind=Parameter.POSITIONAL_OR_KEYWORD,
                              annotation=arg_type))
    if api_key:
        get_query_params.append(security_parameter)

    def _with_signature(func, params):
        # FastAPI derives validation and docs from the signature, so inject
        # the dynamically built parameter list at runtime.
        func.__signature__ = signature(func).replace(parameters=tuple(params))
        return func

    # --- GET: filter rows by any (non-JSON) column ---
    def get_func(*args, **kwargs):
        return api_get(table, kwargs)

    _with_signature(get_func, get_query_params)
    router.get(
        f"/{table.__name__}",
        summary=f'get items from {table.__name__} table',
        response_model=get_api_model(table, "RESPONSE"))(get_func)

    # --- DELETE: address a single row by primary key ---
    def del_func(*args, **kwargs):
        return api_delete(table, kwargs)

    # Copy here: appending the security parameter must not mutate
    # api_get_params (the original code aliased the list).
    del_fun_params = list(api_get_params)
    if api_key:
        del_fun_params.append(security_parameter)
    _with_signature(del_func, del_fun_params)
    router.delete(
        f"/{table.__name__}/{'/'.join(path_args)}",
        summary=f'delete items from {table.__name__} table')(del_func)

    # --- POST: create a row from a request body ---
    # Unified with the same signature-injection mechanism as the other
    # routes, instead of duplicating the handler for the api_key case.
    def post_func(*args, **kwargs):
        return api_post(table, kwargs['body'])

    post_params = [
        Parameter("body", kind=Parameter.POSITIONAL_OR_KEYWORD,
                  annotation=get_api_model(table, "POST"))
    ]
    if api_key:
        post_params.append(security_parameter)
    _with_signature(post_func, post_params)
    router.post(
        f"/{table.__name__}",
        summary=f'post items to {table.__name__} table')(post_func)

    # --- PUT: update a row addressed by primary key ---
    def put_func(*args, **kwargs):
        # pkey_attr_names is hoisted from the closure instead of being
        # recomputed on every request.
        entity_pkeys = {k: v for k, v in kwargs.items()
                        if k in pkey_attr_names}
        return api_put(table, new_data=kwargs['body'],
                       entity_pkeys=entity_pkeys)

    put_args = [Parameter(pk, kind=Parameter.POSITIONAL_OR_KEYWORD)
                for pk in pkey_attr_names]
    put_args.append(
        Parameter("body", kind=Parameter.POSITIONAL_OR_KEYWORD,
                  annotation=get_api_model(table, "PUT")))
    if api_key:
        put_args.append(security_parameter)
    _with_signature(put_func, put_args)
    router.put(
        f"/{table.__name__}/{'/'.join(path_args)}",
        summary=f'update items in {table.__name__} table')(put_func)

    app.include_router(router, prefix=prefix, tags=['Database'])