def prepare_models(root_dir: str = '/models'):
    """Warm up every configured model so it is ready before serving.

    Reads the model selection (detection, recognition, gender/age, mask
    detector) and backend settings from environment-driven configs, then
    calls ``prepare_backend`` once per enabled model.

    :param root_dir: Directory holding model files (passed to ``Configs``).
    """
    model_configs = Configs(models_dir=root_dir)
    env_configs = EnvConfigs()

    rec_name = env_configs.models.rec_name
    det_name = env_configs.models.det_name
    ga_name = env_configs.models.ga_name
    mask_detector = env_configs.models.mask_detector

    # Fall back to 640x640 when no inference size was configured.
    im_size = env_configs.defaults.max_size
    if im_size is None:
        im_size = [640, 640]
    im_size = validate_max_size(im_size)

    # Preserve preparation order: detection, recognition, gender/age, mask.
    requested = (det_name, rec_name, ga_name, mask_detector)
    for model in (name for name in requested if name is not None):
        # Batch size > 1 only for models that declare batching support;
        # the detector has its own batch-size knob, everything else
        # shares the recognition batch size.
        if model_configs.models[model].get('allow_batching'):
            batch_size = (env_configs.models.det_batch_size
                          if model == det_name
                          else env_configs.models.rec_batch_size)
        else:
            batch_size = 1

        logging.info(f"Preparing '{model}' model...")
        prepare_backend(model_name=model,
                        backend_name=env_configs.models.backend_name,
                        im_size=im_size,
                        force_fp16=env_configs.models.fp16,
                        max_batch_size=batch_size,
                        config=model_configs)
        logging.info(f"'{model}' model ready!")
from fastapi.responses import UJSONResponse
from fastapi.openapi.docs import (
    get_redoc_html,
    get_swagger_ui_html,
    get_swagger_ui_oauth2_redirect_html,
)
from modules.processing import Processing
from env_parser import EnvConfigs

# Application version string (exposed by the API / used for reporting).
__version__ = "0.5.9"

# Absolute directory containing this module.
# NOTE(review): assumes `os` is imported elsewhere in this file — confirm.
dir_path = os.path.dirname(os.path.realpath(__file__))

# Read runtime settings from environment variables
configs = EnvConfigs()

# Configure root logging before building the processing pipeline so that
# startup/model-loading messages are emitted with the configured level.
# NOTE(review): assumes `logging` is imported elsewhere in this file — confirm.
logging.basicConfig(
    level=configs.log_level,
    format='%(asctime)s %(levelname)s - %(message)s',
    datefmt='[%H:%M:%S]',
)

# Module-level processing pipeline shared by request handlers; constructed
# once at import time from the environment-driven configuration above.
processing = Processing(det_name=configs.models.det_name,
                        rec_name=configs.models.rec_name,
                        ga_name=configs.models.ga_name,
                        device=configs.models.device,
                        max_size=configs.defaults.max_size,
                        max_rec_batch_size=configs.models.rec_batch_size,
                        backend_name=configs.models.backend_name,
                        force_fp16=configs.models.fp16)