async def startup_event():
    """Application startup hook.

    Switches the shared logger to DEBUG, provisions the Redis pool,
    connects the database and seeds the initial deal.
    """
    logger.setLevel(logging.DEBUG)
    # NOTE(review): create_redis_pool is the aioredis 1.x API — confirm the
    # pinned aioredis version before upgrading.
    redis_pool = await aioredis.create_redis_pool('redis://localhost', encoding='utf-8')
    await rdsopr.full_set_up(redis_pool)
    await database.connect()
    await crud_deal.initial_deal()
def __init__(
    self,
    app: ASGIApp,
    querystrings: bool = False,
    headers: bool = False,
) -> None:
    """Wrap *app* and record which request parts this middleware logs."""
    super().__init__(app)
    # Keep a reference to the module logger and force verbose output.
    self.logger = logger
    logger.setLevel(logging.DEBUG)
    self.headers = headers
    self.querystrings = querystrings
def setup(self, cfg):
    """Attach a NullHandler to gunicorn's error/access loggers and apply
    the configured level to them and to the FastAPI logger."""
    null_handler = logging.NullHandler()
    self.error_logger = logging.getLogger("gunicorn.error")
    self.access_logger = logging.getLogger("gunicorn.access")
    log_level = get_log_level_config()
    # Same handler and level for both gunicorn loggers.
    for gunicorn_log in (self.error_logger, self.access_logger):
        gunicorn_log.addHandler(null_handler)
        gunicorn_log.setLevel(log_level)
    fastapi_logger.setLevel(log_level)
def create_app():
    """Create the FastAPI app, initialise the DB and log the deployed git SHA."""
    # Disable the framework logger: only errors pass its filter.
    flogger.setLevel(logging.ERROR)
    application = FastAPI()
    db.init_db()

    def announce_deploy():
        # One-shot startup report of the currently deployed commit.
        logger.info('initializing labml_app')
        try:
            repo = git.Repo(search_parent_directories=True)
            sha = repo.head.object.hexsha
            # NOTE(review): deliberately logged at ERROR level, presumably so
            # the SHA is visible despite quieter log settings — confirm.
            logger.error(f'THIS IS NOT AN ERROR: Server Deployed SHA : {sha}')
        except git.InvalidGitRepositoryError:
            # Not running from a git checkout (e.g. packaged deploy) — skip.
            pass

    announce_deploy()
    return application
logger=logger,
log_ex=SETTING_CONTAINER.settings_local['LOGGER_FILE'],
verbose=False,
log_level=SETTING_CONTAINER.settings_local['LOG_LEVEL'],
console=False,
)
# Attach the tech-log handler to both uvicorn's access logger and the
# FastAPI logger so their records land in the same file.
uvicorn_access_logger = logging.getLogger('uvicorn.access')
add_hnd = lw.create_tech_handler(
    log_file=SETTING_CONTAINER.settings_local['LTECH_FILE'],
    log_level=SETTING_CONTAINER.settings_local['LTECH_LEVEL'],
)
uvicorn_access_logger.addHandler(add_hnd)
fastapi_logger.addHandler(add_hnd)
# NOTE(review): compares against 'main', not '__main__' — this is correct
# only if the module is literally named main.py; confirm intent.
if __name__ != 'main':
    # Imported by a server: inherit the tech handler's level.
    fastapi_logger.setLevel(add_hnd.level)
    uvicorn_access_logger.setLevel(add_hnd.level)
else:
    # Run directly: be verbose.
    fastapi_logger.setLevel(logging.DEBUG)
    uvicorn_access_logger.setLevel(logging.INFO)
logger.info("Server started at {}".format(datetime.now()))
# ---- routers section
app = FastAPI(
    title=SETTING_CONTAINER.settings['API_TITLE'],
    description=SETTING_CONTAINER.settings['API_DESCRIPTION'],
    version=SETTING_CONTAINER.settings['API_MAIN_VERSION'],
)
from starlette.requests import Request

from .libs.database import client as db
from .libs.logging import JsonFormatter
from .routers import router

# Database connection parameters come from the environment; the first four
# are required and raise KeyError at import time when missing.
DB_HOST = os.environ["DB_HOST"]
DB_USER = os.environ["DB_USER"]
DB_PASSWORD = os.environ["DB_PASSWORD"]
DB_NAME = os.environ["DB_NAME"]
APP_LOG_LEVEL = os.getenv("APP_LOG_LEVEL", "INFO")

# Emit JSON-formatted records to the stream handler at the configured level.
handler = logging.StreamHandler()
handler.setFormatter(JsonFormatter())
logger.addHandler(handler)
logger.setLevel(APP_LOG_LEVEL)

app = FastAPI()
app.include_router(router, prefix="/v1")


@app.on_event("startup")
async def db_startup() -> None:
    """Open the shared DB client connection on application startup."""
    await db.init(
        host=DB_HOST,
        user=DB_USER,
        password=DB_PASSWORD,
        db=DB_NAME,
    )
    logger.info("DB client successfully connected.")
from scienceqa import load_sqa, main_sqa
from base import (compute_indicators, get_indicators, answer_quantities, back_all)

# Force CPU-only inference.
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

config = ConfigParser()
config.read("../conf/conf.ini")
# FIX: was eval(config['MODE']['PRODUCTION']) — eval() on config-file text is
# unsafe and unnecessary; ConfigParser has a dedicated boolean parser that
# accepts True/False (case-insensitive) plus 1/0, yes/no, on/off.
PROD = config.getboolean('MODE', 'PRODUCTION')
INDEX = config['ES']['QUANTITIES']
mail = config['MAIL']
HOST = os.getenv('API_HOST', '0.0.0.0')
# FIX: coerce to int — os.getenv returns a str whenever API_PORT is set, so
# the previous value was int or str depending on the environment.
PORT = int(os.getenv('API_PORT', 5000))

# Mirror uvicorn's handlers and verbosity onto the FastAPI logger.
uvicorn_logger = logging.getLogger('uvicorn')
f_logger.handlers = uvicorn_logger.handlers
f_logger.setLevel(uvicorn_logger.level)

# Outgoing-mail configuration, credentials supplied via environment.
MAIL = ConnectionConfig(MAIL_USERNAME=os.getenv('MAIL_USER', ''),
                        MAIL_PASSWORD=os.getenv('MAIL_KEY', ''),
                        MAIL_FROM="*****@*****.**",
                        MAIL_FROM_NAME="Science Checker",
                        MAIL_PORT=int(os.getenv('MAIL_PORT', 587)),
                        MAIL_SERVER=os.getenv('MAIL_SERVER', ''),
                        MAIL_TLS=True,
                        MAIL_SSL=False,
                        TEMPLATE_FOLDER=mail['TEMPLATE'])
CLASSIFIER = config['SQA']['CLASSIFIER']
PULLER = config['SQA'].get('PULLER')
EXTRACTOR = config['SQA'].get('EXTRACTOR', 'eqa')
import logging

from starlette.middleware.cors import CORSMiddleware
from sqlalchemy.exc import SQLAlchemyError

from werewolf.api import api_router
# from werewolf.api.sio import sio_app
from werewolf.websocket.websocket import broadcaster, init_websocket
from werewolf.core.config import settings
from werewolf.utils.game_exceptions import GameFinished
from werewolf.utils.enums import GameEnum
from werewolf.websocket.websocket import publish_history
from werewolf.models import Game, Role

# Reuse gunicorn's handlers and verbosity for the application logger.
gunicorn_logger = logging.getLogger('gunicorn.error')
logger.handlers = gunicorn_logger.handlers
logger.setLevel(gunicorn_logger.level)

app = FastAPI()

if settings.BACKEND_CORS_ORIGINS:
    # NOTE(review): wildcard allow_origins together with allow_credentials=True
    # may be rejected by browsers (the CORS spec forbids "*" with credentials);
    # the commented line below is the settings-driven alternative — confirm.
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        # allow_origins=[str(origin) for origin in settings.BACKEND_CORS_ORIGINS],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

app.include_router(api_router, prefix=settings.API_PREFIX)
# app.mount(settings.WEB_SOCKET_PREFIX, sio_app)
from apscheduler.schedulers.background import BackgroundScheduler from app import endpoints, schedules from app.init_database import init app = FastAPI() gunicorn_error_logger = logging.getLogger("gunicorn.error") gunicorn_logger = logging.getLogger("gunicorn") fastapi_logger.handlers = gunicorn_error_logger.handlers logging.root.handlers.extend(gunicorn_error_logger.handlers) logging.root.setLevel(gunicorn_error_logger.level) if __name__ != "__main__": fastapi_logger.setLevel(gunicorn_logger.level) else: fastapi_logger.setLevel(logging.DEBUG) @app.on_event('startup') def startup_event(): try: schedules.reset_smart_off() schedules.scheduled_sync() schedules.scheduled_run() except HTTPException as e: fastapi_logger.warn(e) app.add_middleware(
# custom log formatter
class OneLineExceptionFormatter(logging.Formatter):
    """Formatter that collapses multi-line tracebacks onto a single line."""

    def formatException(self, exc_info):
        # repr() turns the multi-line traceback into an escaped one-liner.
        return repr(super().formatException(exc_info))

    def format(self, record):
        formatted = super().format(record)
        if record.exc_text:
            # Strip embedded newlines and mark the record end with '|'.
            formatted = formatted.replace('\n', '') + '|'
        return formatted


# defining log levels
# LOG_LEVEL = logging.ERROR

# Configuring Logs
# db_logger = logging.getLogger()
# db_logger.setLevel(LOG_LEVEL)

# add database handler
handler = SQLALCHAMYHandler()
# db_logger.addHandler(handler)

# Mirror gunicorn's verbosity onto the FastAPI logger and persist its
# records to the database via the SQLAlchemy handler.
gunicorn_logger = logging.getLogger("gunicorn.error")
fastapi_logger.setLevel(gunicorn_logger.level)
# add database handler
fastapi_logger.addHandler(handler)
from io import BytesIO
import logging
import shutil
from typing import Optional, List, final

from fastapi import APIRouter, BackgroundTasks, HTTPException, Body
from fastapi.logger import logger
from starlette.responses import StreamingResponse

from ..utils.image import get_img_id, update_drive_img_dirs, image_merge_text, get_img
# from ..utils.security import api_key_checker
from ..schemas.image import MergeText
from ..config import get_settings

router = APIRouter()
# Verbose logging for this router.
logger.setLevel(logging.DEBUG)
settings = get_settings()
# Presumably a cache of drive image directories, populated via
# update_drive_img_dirs — confirm against the handlers below.
drive_img_dirs = dict()

# Example payload for the certificate text-merge endpoint: each entry is one
# text layer (content, font file, RGB color, (x, y) position, point size).
cert_example = [{
    'text': '掙扎得勝獎',
    'font': 'ヒラギノ角ゴシック W4.ttc',
    'position': (300, 400),
    'color': (250, 220, 150),
    'size': 250
}, {
    'text': 'HIGHWALL',
    'font': 'NewYork.ttf',
    'color': (0, 0, 0),
    'position': (1300, 850),
    'size': 80
}, {
from fastapi_versioning import VersionedFastAPI
from fastapi.logger import logger
from fastapi.staticfiles import StaticFiles
from starlette.middleware import Middleware
from starlette.middleware.sessions import SessionMiddleware

from . import monitoring
from .api import login, users, services
from .views import exceptions, account, notifications, settings
from .settings import SENTRY_DSN, ESS_NOTIFY_SERVER_ENVIRONMENT, SECRET_KEY

# The following logging setup assumes the app is run with gunicorn:
# uvicorn's access logger and the FastAPI logger both borrow gunicorn's
# handlers, and the FastAPI logger adopts its level.
gunicorn_error_logger = logging.getLogger("gunicorn.error")
uvicorn_access_logger = logging.getLogger("uvicorn.access")
logger.handlers = gunicorn_error_logger.handlers
uvicorn_access_logger.handlers = gunicorn_error_logger.handlers
logger.setLevel(gunicorn_error_logger.level)

# Main application to serve HTML; the session cookie is signed with SECRET_KEY.
middleware = [
    Middleware(SessionMiddleware, secret_key=SECRET_KEY, session_cookie="notify_session")
]
app = FastAPI(exception_handlers=exceptions.exception_handlers, middleware=middleware)
app.include_router(account.router)
app.include_router(notifications.router, prefix="/notifications")
app.include_router(settings.router, prefix="/settings")

# Serve static files relative to the package directory.
app_dir = Path(__file__).parent.resolve()
from fastapi import FastAPI, HTTPException from fastapi.logger import logger from pydantic import BaseModel import numpy as np import os os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' from tensorflow.keras.models import load_model from typing import Dict import uvicorn from pickle import load import pandas as pd # Set the log level to what is configured in the environment. Default to 'DEBUG' logger.setLevel(os.getenv('LOG_LEVEL', 'DEBUG')) # create app instance app = FastAPI() model = load_model("model.h5", compile=False) scaler = load(open('scaler.pkl', 'rb')) # Build request schemas class PredictionDataIn(BaseModel): bitcoin_last_minute: Dict # Helper functions def sort_prediction_request(feature_dict):
def get_logger(level=logging.DEBUG):
    """Set *level* on the shared module logger and hand it back."""
    logger.setLevel(level)
    return logger
async def startup_event():
    """Startup hook: raise verbosity in debug mode, then bring up the
    database connection and the Redis operator."""
    if settings.DEBUG:
        logger.setLevel(logging.DEBUG)
    await database.connect()
    await RdsOpr.start()
from BO.Preferences import Preferences
from BO.Process import ProcessBO
from BO.Project import ProjectBO, ProjectTaxoStats, ProjectUserStats
from BO.Rights import RightsBO
from BO.Sample import SampleBO
from BO.Taxonomy import TaxonBO
from helpers.Asyncio import async_bg_run, log_streamer
from helpers.DynamicLogs import get_logger
from helpers.fastApiUtils import internal_server_error_handler, dump_openapi, get_current_user, RightsThrower, \
    get_optional_current_user, MyORJSONResponse, ValidityThrower
from helpers.login import LoginService

# Module logger from the project's dynamic-log helper.
logger = get_logger(__name__)

# TODO: A nicer API doc, see https://github.com/tiangolo/fastapi/issues/1140
# Keep FastAPI's own logger at INFO regardless of the app logger's setup.
fastapi_logger.setLevel(INFO)

app = FastAPI(
    title="EcoTaxa",
    version="0.0.6",
    # openapi URL as seen from navigator
    openapi_url="/api/openapi.json",
    # root_path="/API_models"
    default_response_class=MyORJSONResponse)

# Instrument a bit: per-request timings recorded through the app logger.
add_timing_middleware(app, record=logger.info, prefix="app", exclude="untimed")

# HTML stuff
# app.mount("/styles", StaticFiles(directory="pages/styles"), name="styles")
templates = Jinja2Templates(directory=os.path.dirname(__file__) +
import json
import logging
from datetime import timedelta, datetime
from time import time

from fastapi.requests import Request
from fastapi import Body
from fastapi.logger import logger

logger.setLevel(logging.INFO)


async def api_logger(request: Request, response=None, error=None):
    """Build a structured log record for a finished request.

    Invoked with either *response* (success path) or *error*; timing is
    measured from request.state.start, presumably stamped by upstream
    middleware — confirm.
    """
    time_format = "%Y/%m/%d %H:%M:%S"
    # Elapsed seconds since the request was stamped.
    t = time() - request.state.start
    status_code = error.status_code if error else response.status_code
    error_log = None
    user = request.state.user
    if error:
        if request.state.inspect:
            # Frame captured at raise time; pull file/function/line from it.
            frame = request.state.inspect
            error_file = frame.f_code.co_filename
            error_func = frame.f_code.co_name
            error_line = frame.f_lineno
        else:
            error_func = error_file = error_line = "UNKNOWN"
        error_log = dict(
            errorFunc=error_func,
            location="{} line in {}".format(str(error_line), error_file),
            raised=str(error.__class__.__name__),
            msg=str(error.ex),
from fastapi import APIRouter, Body
from fastapi.encoders import jsonable_encoder

# Following block is for logging: reuse gunicorn's handlers so router logs
# reach the server log.
from fastapi.logger import logger
import logging
import sys
import traceback

gunicorn_logger = logging.getLogger('gunicorn.error')
logger.handlers = gunicorn_logger.handlers
# NOTE(review): compares __name__ against "main" (not "__main__") — correct
# only if this module is literally main.py imported as `main`; confirm.
if __name__ != "main":
    # Under a server: inherit gunicorn's configured verbosity.
    logger.setLevel(gunicorn_logger.level)
else:
    # Run directly: be verbose.
    logger.setLevel(logging.DEBUG)
# Above is for logging.

from person_database import (
    add_person,
    delete_person,
    retrieve_person,
    retrieve_persons,
    update_person,
)
from models.person import (
    ErrorResponseModel,
    ResponseModel,
    PersonSchema,
    UpdatePersonModel,
)
import os
import logging

from fastapi import FastAPI
from fastapi.logger import logger as api_logger
from databases.core import logger as db_logger

from db import pg_db, mongo_db
from finances import fh, fm
from routes import router

# Both the API and database loggers write through gunicorn's handlers at the
# level configured via LOG_LEVEL (default INFO).
_log_level = os.getenv("LOG_LEVEL", "INFO").upper()
gunicorn_logger = logging.getLogger("gunicorn.error")
api_logger.handlers = gunicorn_logger.handlers
api_logger.setLevel(_log_level)
db_logger.handlers = gunicorn_logger.handlers
db_logger.setLevel(_log_level)

app = FastAPI(redoc_url=None)
app.include_router(router)


@app.on_event("startup")
async def startup():
    """Open the PostgreSQL connection on boot."""
    await pg_db.connect()


@app.on_event("shutdown")
async def shutdown():
    """Close every external client: PostgreSQL, MongoDB, finance services."""
    await pg_db.disconnect()
    mongo_db.close()
    await fh.close()
    await fm.close()
from logging import getLogger

from fastapi import FastAPI, Request, status
from fastapi.exceptions import RequestValidationError
from fastapi.logger import logger as fastapi_logger
from fastapi.responses import ORJSONResponse
from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.middleware.cors import CORSMiddleware

from domain.entity import now
from infra.router import api as api_router

app = FastAPI()

# Under gunicorn, route FastAPI's logs through gunicorn's error logger and
# adopt its level.
logger = getLogger("gunicorn.error")
fastapi_logger.handlers = logger.handlers
fastapi_logger.setLevel(logger.level)

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.exception_handler(StarletteHTTPException)
async def http_exception_handler(request: Request, exc: StarletteHTTPException):
    """Render Starlette HTTP errors as ORJSON responses."""
    return ORJSONResponse(
        status_code=exc.status_code,
# FIX: `logging` is used below (logging.getLogger / logging.INFO) but was
# never imported in this module's import block.
import logging
import os
import uuid
from os import listdir
from pathlib import Path

from datasette_auth_github import GitHubAuth
from fastapi import FastAPI, HTTPException, BackgroundTasks
from fastapi.logger import logger
from pydantic import BaseModel
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import FileResponse

from .core.generate_data import (TestDataGenerator, get_bundle_path, notify_upload)

# Send app logs through uvicorn's error handlers at the configured level
# (LOG_LEVEL must be a numeric level; defaults to logging.INFO).
logger.handlers = logging.getLogger('uvicorn.error').handlers
logger.setLevel(int(os.environ.get('LOG_LEVEL', logging.INFO)))

BUNDLE_DIR = os.environ.get('BUNDLE_DIR', '/BUNDLE_DIR')
HOST_URL = os.environ.get('HOST_URL', 'http://testbuild:8000')
GH_AUTH_CLIENT_ID = os.getenv('GH_AUTH_CLIENT_ID')
GH_AUTH_CLIENT_SECRET = os.getenv('GH_AUTH_CLIENT_SECRET')
ALLOW_GH_ORGS = (os.getenv('ALLOW_GH_ORGS') or 'Ansible').split(',')


class BundleConfig(BaseModel):
    """Parameters controlling the size/shape of a generated test bundle."""
    unified_jobs: int = 1
    job_events: int = 1
    tasks_count: int = 100
    orgs_count: int = 1
    templates_count: int = 1
    spread_days_back: int = 100