def predict_router(manager):
    request_types = [
        servable.model.request_format() for servable in manager.servables
    ]
    request_format = request_types[0] if len(set(request_types)) == 1 else Dict

    async def predict_using_newest_version(input: request_format,
                                           model_name: str = None,
                                           version: str = None):
        result = manager.predict(model_name=model_name,
                                 version=version,
                                 input=input)
        return result

    # set up main model_name route to newest servable
    response_types = [
        servable.model.response_format() for servable in manager.servables
    ]
    response_format = response_types[0] if len(set(response_types)) == 1 else Dict

    router = APIRouter()
    router.post('/predict',
                response_model=response_format,
                responses=model_not_found_response())(predict_using_newest_version)
    return router
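# Minimal self-contained sketch of the pattern above (hypothetical names, not part of
# the original project): the request model is chosen at runtime and used as the
# handler's annotation, and the handler is registered by calling router.post(...) on it
# rather than via decorator syntax.
from fastapi import APIRouter, FastAPI
from pydantic import BaseModel


class TextRequest(BaseModel):
    text: str


def make_predict_router(request_model: type) -> APIRouter:
    router = APIRouter()

    # FastAPI evaluates the annotation when the function is defined, so a model class
    # picked at runtime works the same as a literal annotation.
    async def predict(input: request_model):
        return {"received": input.dict()}

    router.post("/predict")(predict)
    return router


app = FastAPI()
app.include_router(make_predict_router(TextRequest), prefix="/demo")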
def to_fastapi_router(self):
    """
    Creates an instance of FastAPI router. Must be called after route definitions.

    :return: APIRouter instance
    """
    router = APIRouter()
    # POST methods
    for path, route_info in self._routes.post_map.items():
        resp_model = self.get_response_model(path, "post")
        req_model = route_info.request_model

        # if route is not customized, fall back to default POST
        if route_info.handler == dummy_route:
            route_info = self._routes.default_post

        route_name = route_info.name
        if route_info.name_factory:
            route_name = route_info.name_factory(path)

        handler = copy_function(route_info.handler)
        add_annotation_to_first_argument(handler, req_model)  # noqa type: ignore

        router.post(
            path,
            name=route_name,
            summary=route_name,
            description=route_info.description,
            response_description=route_info.response_description,
            response_model=resp_model,
            tags=route_info.tags,
        )(handler)
    return router
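# Sketch of the copy-and-annotate idea used above (hypothetical helpers; the project's
# copy_function and add_annotation_to_first_argument may differ). FastAPI reads the
# handler's signature at registration time, so copying the shared default handler and
# rewriting the first parameter's annotation per path lets each route validate against
# its own request model without leaking the change into other routes.
import inspect
import types


def copy_handler(func):
    # Clone the function object so per-route annotation changes stay local.
    clone = types.FunctionType(func.__code__, func.__globals__, func.__name__,
                               func.__defaults__, func.__closure__)
    clone.__dict__.update(func.__dict__)
    clone.__annotations__ = dict(func.__annotations__)
    clone.__kwdefaults__ = func.__kwdefaults__
    return clone


def annotate_first_argument(func, model):
    # Rewriting __annotations__ is enough: inspect.signature (which FastAPI uses)
    # picks the new annotation up when the route is registered.
    first_param = next(iter(inspect.signature(func).parameters))
    func.__annotations__[first_param] = model
    return func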
def get_router(self) -> APIRouter:
    router = APIRouter()
    router.get('/{model_id}')(self.read_endpoint)
    router.get('/')(self.index_endpoint)
    router.post('/')(self.create_endpoint)
    router.put('/')(self.update_endpoint)
    router.delete('/')(self.delete_endpoint)
    return router
def mount(name: str, router: APIRouter, klass_py, klass_orm, ignored=set()):
    """
    mount mounts common CRUD patterns onto the router

    :param klass_py: Pydantic model
    :param klass_orm: SQLAlchemy model
    :param ignored: set of names to ignore, only from create, read, update, delete, list
    """

    def create(model_py: klass_py, sess: Session = Depends(create_session)):
        model_orm = klass_orm(**model_py.dict(exclude_none=True, exclude_unset=True))
        sess.add(model_orm)
        sess.flush([model_orm])
        return klass_py.from_orm(model_orm)

    def get(idx: int, sess: Session = Depends(create_session)):
        model_orm = sess.query(klass_orm).filter(klass_orm.id == idx).one()
        return klass_py.from_orm(model_orm)

    def update(model_py: klass_py, sess: Session = Depends(create_session)):
        if model_py.id is None:
            raise HTTPException(400, f"{name} id is not given")
        model_orm: klass_orm = sess.query(klass_orm).filter(
            klass_orm.id == model_py.id).one()
        for f, v in model_py.dict(exclude_none=True, exclude_unset=True).items():
            if type(v) is dict:
                # nested models are usually mapped to foreign key objects
                continue
            setattr(model_orm, f, v)
        sess.add(model_orm)
        return

    def delete(idx: int, sess: Session = Depends(create_session)):
        model_orm = sess.query(klass_orm).filter(klass_orm.id == idx).one()
        sess.delete(model_orm)
        return klass_py.from_orm(model_orm)

    def list_all(sess: Session = Depends(create_session)):
        return [klass_py.from_orm(u) for u in sess.query(klass_orm).all()]

    if 'create' not in ignored:
        router.put('/', response_model=klass_py,
                   status_code=status.HTTP_201_CREATED)(create)
    if 'read' not in ignored:
        router.get('/', response_model=klass_py)(get)
    if 'update' not in ignored:
        router.post('/')(update)
    if 'delete' not in ignored:
        router.delete('/', response_model=klass_py,
                      status_code=status.HTTP_202_ACCEPTED)(delete)
    if 'list' not in ignored:
        router.get('/list', response_model=List[klass_py])(list_all)
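# Hypothetical usage sketch for mount() above. ItemORM, ItemSchema, and the router
# prefix are illustrative assumptions, not the project's real names; the point is only
# how a Pydantic schema, a SQLAlchemy model, and the router fit together.
from typing import Optional

from fastapi import APIRouter, FastAPI
from pydantic import BaseModel
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class ItemORM(Base):
    __tablename__ = "items"
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)


class ItemSchema(BaseModel):
    id: Optional[int] = None
    name: str

    class Config:
        orm_mode = True  # lets klass_py.from_orm(...) read SQLAlchemy instances


items_router = APIRouter()
# mount() registers PUT /, GET /, POST /, DELETE / and GET /list on the router.
mount("item", items_router, ItemSchema, ItemORM, ignored={"delete"})

app = FastAPI()
app.include_router(items_router, prefix="/items")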
def register_view(self,
                  view,
                  prefix=None,
                  methods: Union[List[str], Set[str]] = ('GET', 'Retrieve', 'POST', 'PUT', 'DELETE'),
                  tags=None,
                  depends=None):
    """
    If the custom methods are not used, pass methods=None so the view's own methods are used.

    :param view:
    :param prefix:
    :param methods:
    :param tags:
    :return:
    """
    router = APIRouter()
    if not prefix:
        prefix = "/" + view.__class__.__name__
    if not tags:
        tags = [prefix[1:]]
    if not methods:
        methods = view.methods
    if methods.count('GET'):
        # Note: a response model may need to be set here, e.g.
        # get_res_model = get_res_schema(view.schema)
        router.get(
            prefix,
            tags=tags,
        )(view.list)
    if methods.count('Retrieve'):
        router.get(
            prefix + "/{id}",
            tags=tags,
        )(view.retrieve)
    if methods.count('POST'):
        router.post(
            prefix,
            tags=tags,
        )(view.create)
    if methods.count('PUT'):
        router.put(
            prefix,
            tags=tags,
        )(view.update)
    if methods.count('DELETE'):
        router.delete(prefix + "/{id}", tags=tags)(view.delete)
    self.__router.include_router(router, prefix='/admin')
def model_predict_router(manager: ModelManager,
                         model_name: str) -> Tuple[List[str], APIRouter]:
    servables = [
        servable for servable in manager.servables
        if servable.meta_data.model_name == model_name
    ]
    newest_servable = Servable.newest_servable(servables)

    # set router
    router = APIRouter()

    async def predict_using_newest_version(
            input: newest_servable.model.request_format()):
        logging.debug('Prediction; model: {}, version: {}'.format(
            model_name, newest_servable.aspired_model))
        result = manager.predict(model_name=model_name, input=input)
        return result

    # set up main model_name route to newest servable
    router.post('/predict/{}'.format(model_name),
                response_model=newest_servable.model.response_format(),
                responses=model_not_found_response())(predict_using_newest_version)

    def make_version_handler(servable, version):
        # Build each handler in a factory so it binds its own servable and version;
        # defining the handler directly inside the loop would leave every route
        # pointing at the last iteration's values (late-binding closure).
        async def predict_with_specific_version(
                input: servable.model.request_format()):
            logging.debug('Prediction; model: {}, version: {}'.format(
                model_name, servable.meta_data.version))
            result = manager.predict(model_name=model_name,
                                     version=version,
                                     input=input)
            return result

        return predict_with_specific_version

    for servable in servables:
        version = str(servable.aspired_model.aspired_version)
        router.post('/predict/{}/{}'.format(model_name, version),
                    response_model=LabelScoreResponse,
                    responses=model_not_found_response())(
                        make_version_handler(servable, version))

    return [model_name], router
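# Why the handler factory above is needed (illustrative only, independent of the model
# server): closures capture variables, not values, so handlers created directly inside
# the loop would all resolve `version` to its final value.
handlers = [lambda: version for version in ("1", "2", "3")]
print([h() for h in handlers])   # ['3', '3', '3'] -- late binding
bound = [lambda v=version: v for version in ("1", "2", "3")]
print([h() for h in bound])      # ['1', '2', '3'] -- value bound per iteration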
def register(router: APIRouter) -> None:
    router.post("/signup")(adapt_route(make_sign_up_controller()))
router = APIRouter()
user_view = UserView()

router.get(
    "/",
    name="list",
    summary="List users",
    response_model=List[UserEntity],
)(user_view.list)

router.get(
    "/{user_id}",
    name="get",
    summary="Get user by id",
    response_model=UserEntity,
)(user_view.get)

router.post(
    "/",
    name="create",
    summary="Create user",
    response_model=UserEntity,
)(user_view.create)

router.delete(
    "/{user_id}",
    name="delete",
    summary="Delete user",
    response_model=UserEntity,
)(user_view.delete)
from fastapi import APIRouter, Query, Depends
from sqlalchemy import select, func, insert

from fastapi_admin import User, AdminDatabase
from fastapi_admin.auth.depends import create_current_active_user
from fastapi_admin.publicDepends.paging_query import paging_query_depend

from .schemas import TransactionType, OrderStatusEnum, OrderPostSchema, OrderPostRes

router = APIRouter()

from .models import Order
from fastapi_admin.views.methods_get import model_get_list_func
from fastapi_admin.views.methods_post import model_post_func

order_get_list, schema = model_get_list_func(Order)
order_post, order_post_schema = model_post_func(Order)

router.post('/order', name="Create order", deprecated=True,
            response_model=order_post_schema)(order_post)


@router.post('/v2/order', name='Create order', deprecated=True, response_model=OrderPostRes)
async def order_post(order_info: OrderPostSchema,
                     current_user: User = Depends(create_current_active_user(True))):
    print(order_info)
    res = dict(order_info)
    query = insert(Order).values(res)
    res['id'] = await AdminDatabase().database.execute(query)
    return res


@router.get('/order', name="Order list filter test")
async def order_list(platform: str = Query(None, description="Platform order number"),
                     tenant: str = Query(None, description="Merchant order number"),
                     official: str = Query(None, description="Official order number"),
message["results"] = [ upgrade_Result(result) for result in return_value.pop("results") ] return_value = { "message": message, **return_value, } if reasoner: return return_value return {"return value": return_value} ROUTER.post( "/knowledge_graph_one_hop", response_model=Dict, deprecated=True, )(knowledge_graph_one_hop) ROUTER.post( "/query", response_model=Dict, tags=["reasoner"], )(knowledge_graph_one_hop) @ROUTER.get( "/bins", response_model=Dict, ) def handle_bins( year: str = None,
from fastapi import APIRouter

from . import log_device_data, query_device_data, register_device
from typing import Dict, List

router = APIRouter()

router.post("/log_device_data")(log_device_data.fun)
router.get("/query_device_data/{device_id}",
           response_model=Dict[str, List[query_device_data.ChannelData]])(
               query_device_data.fun)
router.post("/register_device")(register_device.fun)
# -*- coding: utf-8 -*-
from fastapi import APIRouter, status
from src.rest.layer.views import LayerView
from src.rest.layer.schemas import list_layers
from fastapi.responses import StreamingResponse

router = APIRouter()
lv = LayerView()

router.post(
    '/upload',
    name='upload_layer',
    status_code=status.HTTP_204_NO_CONTENT
)(lv.post)

router.get(
    '/{layer_id}/mvt/{z}/{x}/{y}.{fmt}',
    name='get_mvt_layer',
    response_class=StreamingResponse,
    responses={
        200: {
            "content": {
                "application/vnd.mapbox-vector-tile": {
                    "example": "bytes"
                }
            },
            "description": "Returns a .pbf tile.",
        }
    }
)(lv.get_mvt)
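# Hedged sketch of what a handler behind get_mvt_layer might look like (hypothetical,
# not the project's LayerView.get_mvt): tile bytes are streamed back with the
# vnd.mapbox-vector-tile media type advertised in the responses mapping above.
import io

from fastapi.responses import StreamingResponse


async def get_mvt_example(layer_id: int, z: int, x: int, y: int, fmt: str):
    tile_bytes = b""  # placeholder; a real implementation would render or query the tile
    return StreamingResponse(io.BytesIO(tile_bytes),
                             media_type="application/vnd.mapbox-vector-tile")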
{{ cookiecutter.aggregate_name_snake }}_router = APIRouter()
auth_router = APIRouter()


@app.on_event("startup")
async def on_startup():
    bootstrap.register_handlers()
    await session.async_db.connect()


@app.on_event("shutdown")
async def shutdown():
    await session.async_db.disconnect()


@{{ cookiecutter.aggregate_name_snake }}_router.post('',
                                                     response_model={{ cookiecutter.aggregate_name_camel }}Resp,
                                                     status_code=status.HTTP_201_CREATED)
async def create_{{ cookiecutter.aggregate_name_snake }}(cmd: Create{{ cookiecutter.aggregate_name_camel }},
                                                         db: Session = Depends(get_db),
                                                         _: models.UserDB = Depends(users.users.current_user())):
    aggreg = await service.create_aggreg(db, cmd)
    return {{ cookiecutter.aggregate_name_camel }}Resp(data=aggreg)


@auth_router.post('/refresh')
async def refresh_jwt(response: Response, user=Depends(users.users.current_user())):
    return await user.jwt_authentication.get_login_response(user, response)


auth_router.include_router(users.auth_router)
auth_router.include_router(users.register_router)
auth_router.include_router(users.reset_password_router)
auth_router.include_router(users.verify_router)
def build_app(
    tree,
    authentication=None,
    server_settings=None,
    query_registry=None,
    serialization_registry=None,
    compression_registry=None,
):
    """
    Serve a Tree

    Parameters
    ----------
    tree : Tree
    authentication: dict, optional
        Dict of authentication configuration.
    authenticators: list, optional
        List of authenticator classes (one per supported identity provider)
    server_settings: dict, optional
        Dict of other server configuration.
    """
    authentication = authentication or {}
    authenticators = {
        spec["provider"]: spec["authenticator"]
        for spec in authentication.get("providers", [])
    }
    server_settings = server_settings or {}
    query_registry = query_registry or get_query_registry()
    compression_registry = compression_registry or default_compression_registry

    app = FastAPI()

    if SHARE_TILED_PATH:
        # If the distribution includes static assets, serve UI routes.

        @app.get("/ui/{path:path}")
        async def ui(path):
            response = await lookup_file(path)
            return response

        async def lookup_file(path, try_app=True):
            if not path:
                path = "index.html"
            full_path = Path(SHARE_TILED_PATH, "ui", path)
            try:
                stat_result = await anyio.to_thread.run_sync(os.stat, full_path)
            except PermissionError:
                raise HTTPException(status_code=401)
            except FileNotFoundError:
                # This may be a URL that has meaning to the client-side application,
                # such as /ui/node/metadata/a/b/c.
                # Serve index.html and let the client-side application sort it out.
                if try_app:
                    response = await lookup_file("index.html", try_app=False)
                    return response
                raise HTTPException(status_code=404)
            except OSError:
                raise
            return FileResponse(
                full_path,
                stat_result=stat_result,
                method="GET",
                status_code=200,
            )

        app.mount(
            "/static",
            StaticFiles(directory=Path(SHARE_TILED_PATH, "static")),
            name="ui",
        )
        templates = Jinja2Templates(Path(SHARE_TILED_PATH, "templates"))

        @app.get("/", response_class=HTMLResponse)
        async def index(
            request: Request,
            # This dependency is here because it runs the code that moves
            # API key from the query parameter to a cookie (if it is valid).
            principal=Security(get_current_principal, scopes=[]),
        ):
            if request.headers.get("user-agent", "").startswith("python-tiled"):
                # This results in an error message like
                # ClientError: 400: To connect from a Python client, use
                # http://localhost:8000/api not http://localhost:8000/?root_path=true
                raise HTTPException(
                    status_code=400,
                    detail=f"To connect from a Python client, use {get_base_url(request)} not",
                )
            return templates.TemplateResponse(
                "index.html",
                {
                    "request": request,
                    # This is used to fill in the Python code sample with the API URL.
                    "api_url": get_base_url(request),
                    # This is used to construct the link to the React UI.
                    "root_url": get_root_url(request),
                    # If defined, this adds a Binder link to the page.
                    "binder_link": os.getenv("TILED_BINDER_LINK"),
                },
            )

    app.state.allow_origins = []
    app.include_router(router, prefix="/api")

    # The Tree and Authenticator have the opportunity to add custom routes to
    # the server here. (Just for example, a Tree of BlueskyRuns uses this
    # hook to add a /documents route.) This has to be done before dependency_overrides
    # are processed, so we cannot just inject this configuration via Depends.
    for custom_router in getattr(tree, "include_routers", []):
        app.include_router(custom_router, prefix="/api")

    if authentication.get("providers", []):
        # Delay these imports to avoid delaying startup with the SQL and cryptography
        # imports if they are not needed.
        from .authentication import (
            base_authentication_router,
            build_auth_code_route,
            build_handle_credentials_route,
            oauth2_scheme,
        )

        # For the OpenAPI schema, inject an OAuth2PasswordBearer URL.
        first_provider = authentication["providers"][0]["provider"]
        oauth2_scheme.model.flows.password.tokenUrl = (
            f"/api/auth/provider/{first_provider}/token"
        )
        # Authenticators provide Router(s) for their particular flow.
        # Collect them in the authentication_router.
        authentication_router = APIRouter()
        # This adds the universal routes like /session/refresh and /session/revoke.
        # Below we will add routes specific to our authentication providers.
        authentication_router.include_router(base_authentication_router)
        for spec in authentication["providers"]:
            provider = spec["provider"]
            authenticator = spec["authenticator"]
            mode = authenticator.mode
            if mode == Mode.password:
                authentication_router.post(f"/provider/{provider}/token")(
                    build_handle_credentials_route(authenticator, provider))
            elif mode == Mode.external:
                authentication_router.get(f"/provider/{provider}/code")(
                    build_auth_code_route(authenticator, provider))
                authentication_router.post(f"/provider/{provider}/code")(
                    build_auth_code_route(authenticator, provider))
            else:
                raise ValueError(f"unknown authentication mode {mode}")
            for custom_router in getattr(authenticator, "include_routers", []):
                authentication_router.include_router(
                    custom_router, prefix=f"/provider/{provider}")
        # And add this authentication_router itself to the app.
        app.include_router(authentication_router, prefix="/api/auth")

    @lru_cache(1)
    def override_get_authenticators():
        return authenticators

    @lru_cache(1)
    def override_get_root_tree():
        return tree

    @lru_cache(1)
    def override_get_settings():
        settings = get_settings()
        for item in [
            "allow_anonymous_access",
            "secret_keys",
            "single_user_api_key",
            "access_token_max_age",
            "refresh_token_max_age",
            "session_max_age",
        ]:
            if authentication.get(item) is not None:
                setattr(settings, item, authentication[item])
        for item in ["allow_origins", "response_bytesize_limit"]:
            if server_settings.get(item) is not None:
                setattr(settings, item, server_settings[item])
        database = server_settings.get("database", {})
        if database.get("uri"):
            settings.database_uri = database["uri"]
        if database.get("pool_size"):
            settings.database_pool_size = database["pool_size"]
        if database.get("pool_pre_ping"):
            settings.database_pool_pre_ping = database["pool_pre_ping"]
        object_cache_available_bytes = server_settings.get(
            "object_cache", {}).get("available_bytes")
        if object_cache_available_bytes is not None:
            setattr(settings, "object_cache_available_bytes",
                    object_cache_available_bytes)
        if authentication.get("providers"):
            # If we support authentication providers, we need a database, so if one is
            # not set, use a SQLite database in the current working directory.
            settings.database_uri = settings.database_uri or "sqlite:///./tiled.sqlite"
        return settings

    @app.on_event("startup")
    async def startup_event():
        # Validate the single-user API key.
        settings = app.dependency_overrides[get_settings]()
        single_user_api_key = settings.single_user_api_key
        if single_user_api_key is not None:
            if not single_user_api_key.isalnum():
                raise ValueError(
                    """
The API key must only contain alphanumeric characters. We enforce this because
pasting other characters into a URL, as in ?api_key=..., can result in confusing
behavior due to ambiguous encodings.

The API key can be as long as you like.
Here are two ways to generate a valid one:

# With openssl:
openssl rand -hex 32

# With Python:
python -c "import secrets; print(secrets.token_hex(32))"
"""
                )
        # Stash these to cancel this on shutdown.
        app.state.tasks = []
        # Trees and Authenticators can run tasks in the background.
        background_tasks = []
        background_tasks.extend(getattr(tree, "background_tasks", []))
        for authenticator in authenticators:
            background_tasks.extend(
                getattr(authenticator, "background_tasks", []))
        for task in background_tasks or []:
            asyncio_task = asyncio.create_task(task())
            app.state.tasks.append(asyncio_task)
        # The /search route is defined at server startup so that the user has the
        # opportunity to register custom query types before startup.
        app.include_router(declare_search_router(query_registry), prefix="/api")
        app.state.allow_origins.extend(settings.allow_origins)
        app.add_middleware(
            CORSMiddleware,
            allow_origins=app.state.allow_origins,
            allow_credentials=True,
            allow_methods=["*"],
            allow_headers=["*"],
        )
        object_cache_logger.setLevel(settings.object_cache_log_level.upper())
        object_cache_available_bytes = settings.object_cache_available_bytes
        import psutil

        TOTAL_PHYSICAL_MEMORY = psutil.virtual_memory().total
        if object_cache_available_bytes < 0:
            raise ValueError("Negative object cache size is not interpretable.")
        if object_cache_available_bytes == 0:
            cache = NO_CACHE
            object_cache_logger.info("disabled")
        else:
            if 0 < object_cache_available_bytes < 1:
                # Interpret this as a fraction of system memory.
                object_cache_available_bytes = int(
                    TOTAL_PHYSICAL_MEMORY * object_cache_available_bytes)
            else:
                object_cache_available_bytes = int(object_cache_available_bytes)
            cache = ObjectCache(object_cache_available_bytes)
            percentage = round(
                object_cache_available_bytes / TOTAL_PHYSICAL_MEMORY * 100)
            object_cache_logger.info(
                f"Will use up to {object_cache_available_bytes:_} bytes ({percentage:d}% of total physical RAM)"
            )
        set_object_cache(cache)
        # Expose the root_tree here to make it easier to access it from tests,
        # in usages like:
        # client.context.app.state.root_tree
        app.state.root_tree = app.dependency_overrides[get_root_tree]()

        if settings.database_uri is not None:
            from sqlalchemy import create_engine
            from sqlalchemy.orm import sessionmaker

            from ..database import orm
            from ..database.core import (
                REQUIRED_REVISION,
                DatabaseUpgradeNeeded,
                UninitializedDatabase,
                check_database,
                initialize_database,
                make_admin_by_identity,
            )

            connect_args = {}
            if settings.database_uri.startswith("sqlite"):
                connect_args.update({"check_same_thread": False})

            engine = create_engine(settings.database_uri, connect_args=connect_args)
            redacted_url = engine.url._replace(password="******")
            try:
                check_database(engine)
            except UninitializedDatabase:
                # Create tables and stamp (alembic) revision.
                logger.info(
                    f"Database {redacted_url} is new. Creating tables and marking revision {REQUIRED_REVISION}."
                )
                initialize_database(engine)
                logger.info("Database initialized.")
            except DatabaseUpgradeNeeded as err:
                print(
                    f"""

The database used by Tiled to store authentication-related information
was created using an older version of Tiled. It needs to be upgraded to
work with this version of Tiled.
Back up the database, and then run:

    tiled admin upgrade-database {redacted_url}
""",
                    file=sys.stderr,
                )
                raise err from None
            else:
                logger.info(f"Connected to existing database at {redacted_url}.")
            SessionLocal = sessionmaker(autocommit=False,
                                        autoflush=False,
                                        bind=engine)
            db = SessionLocal()
            for admin in authentication.get("tiled_admins", []):
                logger.info(
                    f"Ensuring that principal with identity {admin} has role 'admin'"
                )
                make_admin_by_identity(
                    db,
                    identity_provider=admin["provider"],
                    id=admin["id"],
                )

            async def purge_expired_sessions_and_api_keys():
                logger.info(
                    "Purging expired Sessions and API keys from the database.")
                while True:
                    await asyncio.get_running_loop().run_in_executor(
                        None, purge_expired(engine, orm.Session))
                    await asyncio.get_running_loop().run_in_executor(
                        None, purge_expired(engine, orm.APIKey))
                    await asyncio.sleep(600)

            app.state.tasks.append(
                asyncio.create_task(purge_expired_sessions_and_api_keys()))

    @app.on_event("shutdown")
    async def shutdown_event():
        for task in app.state.tasks:
            task.cancel()

    app.add_middleware(
        CompressionMiddleware,
        compression_registry=compression_registry,
        minimum_size=1000,
    )

    @app.middleware("http")
    async def capture_metrics(request: Request, call_next):
        """
        Place metrics in Server-Timing header, in accordance with HTTP spec.
        """
        # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Server-Timing
        # https://w3c.github.io/server-timing/#the-server-timing-header-field
        # This information seems safe to share because the user can easily
        # estimate it based on request/response time, but if we add more detailed
        # information here we should keep in mind security concerns and perhaps
        # only include this for certain users.
        # Initialize a dict that routes and dependencies can stash metrics in.
        metrics = collections.defaultdict(
            lambda: collections.defaultdict(lambda: 0))
        request.state.metrics = metrics
        # Record the overall application time.
        with record_timing(metrics, "app"):
            response = await call_next(request)
        # Server-Timing specifies times should be in milliseconds.
        # Prometheus specifies times should be in seconds.
        # Therefore, we store as seconds and convert to ms for Server-Timing here.
        # That is what the factor of 1000 below is doing.
        response.headers["Server-Timing"] = ", ".join(
            f"{key};" + ";".join((f"{metric}={value * 1000:.1f}"
                                  if metric == "dur" else f"{metric}={value:.1f}")
                                 for metric, value in metrics_.items())
            for key, metrics_ in metrics.items())
        response.__class__ = PatchedStreamingResponse  # tolerate memoryview
        return response

    @app.middleware("http")
    async def double_submit_cookie_csrf_protection(request: Request, call_next):
        # https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#double-submit-cookie
        csrf_cookie = request.cookies.get(CSRF_COOKIE_NAME)
        if (request.method not in SAFE_METHODS) and set(
                request.cookies).intersection(SENSITIVE_COOKIES):
            if not csrf_cookie:
                return Response(status_code=403,
                                content="Expected tiled_csrf_token cookie")
            # Get the token from the Header or (if not there) the query parameter.
            csrf_token = request.headers.get(CSRF_HEADER_NAME)
            if csrf_token is None:
                parsed_query = urllib.parse.parse_qs(request.url.query)
                csrf_token = parsed_query.get(CSRF_QUERY_PARAMETER)
            if not csrf_token:
                return Response(
                    status_code=403,
                    content=f"Expected {CSRF_QUERY_PARAMETER} query parameter or {CSRF_HEADER_NAME} header",
                )
            # Securely compare the token with the cookie.
            if not secrets.compare_digest(csrf_token, csrf_cookie):
                return Response(status_code=403,
                                content="Double-submit CSRF tokens do not match")
        response = await call_next(request)
        response.__class__ = PatchedStreamingResponse  # tolerate memoryview
        if not csrf_cookie:
            response.set_cookie(
                key=CSRF_COOKIE_NAME,
                value=secrets.token_urlsafe(32),
                httponly=True,
                samesite="lax",
            )
        return response

    @app.middleware("http")
    async def set_cookies(request: Request, call_next):
        "This enables dependencies to inject cookies that they want to be set."
        # Create some Request state, to be (possibly) populated by dependencies.
        request.state.cookies_to_set = []
        response = await call_next(request)
        response.__class__ = PatchedStreamingResponse  # tolerate memoryview
        for params in request.state.cookies_to_set:
            params.setdefault("httponly", True)
            params.setdefault("samesite", "lax")
            response.set_cookie(**params)
        return response

    app.openapi = partial(custom_openapi, app)
    app.dependency_overrides[get_authenticators] = override_get_authenticators
    app.dependency_overrides[get_root_tree] = override_get_root_tree
    app.dependency_overrides[get_settings] = override_get_settings
    if query_registry is not None:

        @lru_cache(1)
        def override_get_query_registry():
            return query_registry

        app.dependency_overrides[get_query_registry] = override_get_query_registry
    if serialization_registry is not None:

        @lru_cache(1)
        def override_get_serialization_registry():
            return serialization_registry

        app.dependency_overrides[
            get_serialization_registry] = override_get_serialization_registry

    metrics_config = server_settings.get("metrics", {})
    if metrics_config.get("prometheus", False):
        from . import metrics

        app.include_router(metrics.router, prefix="/api")

        @app.middleware("http")
        async def capture_metrics_prometheus(request: Request, call_next):
            response = await call_next(request)
            metrics.capture_request_metrics(request, response)
            return response

    return app
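# Hypothetical serving sketch, not part of build_app itself: the function returns a
# regular FastAPI instance, so any ASGI server can host it. `tree` is assumed to be
# constructed elsewhere in the surrounding project.
def serve(tree, host: str = "127.0.0.1", port: int = 8000):
    import uvicorn  # assumption: an ASGI server such as uvicorn is installed

    app = build_app(tree)
    uvicorn.run(app, host=host, port=port)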