def init_roles(self) -> None:
    """Synchronize configured roles with the roles stored in the database.

    Roles already present are kept (their description is refreshed when it
    changed); missing roles are created.
    """
    existing = {role.name: role for role in self.get_roles()}

    configured = list(self.roles_data.values())
    # NOTE(review): this inspects roles_data *values* although the error
    # message talks about role names — confirm which one is intended
    if len(configured) != len(set(configured)):
        print_and_exit("Found duplicated role names: {}", str(sorted(configured)))

    for name in self.roles:
        description = self.roles_data.get(name, ROLE_DISABLED)

        role = existing.get(name)
        if not role:
            log.info("Creating role: {}", name)
            self.create_role(name=name, description=description)
        elif role.description == description:
            log.info("Role {} already exists", name)
        else:
            log.info("Role {} already exists, updating description", name)
            role.description = description
            self.save_role(role)
def unpack_token(cls, token: str, raiseErrors: bool = False) -> Optional[Payload]:
    """Decode a JWT token and return its payload.

    Returns None when the token cannot be decoded and raiseErrors is False;
    otherwise the decoding error is re-raised to the caller.
    """
    try:
        if not cls.JWT_SECRET:
            print_and_exit(  # pragma: no cover
                "Server misconfiguration, missing jwt configuration"
            )
        else:
            return jwt.decode(token, cls.JWT_SECRET, algorithms=[cls.JWT_ALGO])
    # now > exp
    except ExpiredSignatureError as e:
        # should this token be invalidated into the DB?
        if raiseErrors:
            raise e
        log.info("Unable to decode JWT token. {}", e)
    # now < nbf
    except ImmatureSignatureError as e:
        if raiseErrors:
            raise e
        log.info("Unable to decode JWT token. {}", e)
    except Exception as e:
        if raiseErrors:
            raise e
        log.warning("Unable to decode JWT token. {}", e)

    return None
def start(self):
    """Run the bot until interrupted: start polling, notify admins, then idle."""
    self.updater.start_polling(read_latency=5)

    ready_message = "Bot is ready to accept requests"
    self.admins_broadcast(ready_message)
    log.info(ready_message)

    # Blocks until the process receives a stop signal
    self.updater.idle()

    print_and_exit("Bot closed")  # pragma: no cover
def __init__(self):
    """Configure the Telegram bot: updater, handlers, admin/user lists, API client."""
    self.commands = {}
    self.variables = Env.load_variables_group(prefix="telegram")

    api_key = self.variables.get("api_key")
    if not api_key:  # pragma: no cover
        raise ServiceUnavailable("Missing API KEY")

    self.updater = Updater(
        api_key,
        # Starting from v13 use_context is True by default
        # use_context=True,
        workers=Env.to_int(self.variables.get("workers"), default=1),
    )

    dispatcher = self.updater.dispatcher
    # Inline keyboard callback
    dispatcher.add_handler(CallbackQueryHandler(self.inline_keyboard_button))
    # Errors
    dispatcher.add_error_handler(self.error_callback)

    self.admins = Bot.get_ids(self.variables.get("admins"))
    if not self.admins:  # pragma: no cover
        print_and_exit("No admin list")
    self.users = Bot.get_ids(self.variables.get("users"))

    self.api = BotApiClient(self.variables)
def launch() -> None:  # pragma: no cover
    """Launch the RAPyDo-based HTTP API server"""
    mywait()

    if initializing():
        print_and_exit(
            "Please wait few more seconds: initialization is still in progress"
        )

    # Point Flask at the project entry point unless already configured
    if not Env.get("FLASK_APP", "").strip():
        os.environ["FLASK_APP"] = f"{current_package}.__main__"

    flask_arguments = [
        "run",
        "--host",
        BIND_INTERFACE,
        "--port",
        Env.get("FLASK_PORT", "8080"),
        "--reload",
        "--no-debugger",
        "--eager-loading",
        "--with-threads",
    ]

    # Call to untyped function "FlaskGroup" in typed context
    cli = FlaskGroup()  # type: ignore
    # Call to untyped function "main" in typed context
    cli.main(prog_name="restapi", args=flask_arguments)  # type: ignore

    log.warning("Server shutdown")
def import_models(
    name: str, package: str, mandatory: bool = False
) -> Dict[str, Type[Any]]:
    """Import the model classes exposed by a package module.

    Returns an empty dict when the module cannot be loaded, unless the
    import is flagged as mandatory: in that case the server is stopped.
    """
    # Core connectors keep their models under connectors/<name>/models
    if package == BACKEND_PACKAGE:
        module_name = f"{package}.connectors.{name}.models"
    else:
        module_name = f"{package}.models.{name}"

    module = None
    try:
        module = Meta.get_module_from_string(module_name, exit_on_fail=True)
    # NOTE(review): BaseException also catches the SystemExit raised by
    # exit_on_fail=True — presumably intentional, confirm before narrowing
    except BaseException as e:
        if mandatory:
            log.critical(e)

    if module:
        return Meta.get_new_classes_from_module(module)

    if mandatory:
        print_and_exit("Cannot load {} models from {}", name, module_name)
    return {}
def load_default_user() -> None:
    """Read the default credentials from the environment into BaseAuthentication."""
    username = Env.get("AUTH_DEFAULT_USERNAME", "")
    password = Env.get("AUTH_DEFAULT_PASSWORD", "")

    BaseAuthentication.default_user = username
    BaseAuthentication.default_password = password

    if not username or not password:  # pragma: no cover
        print_and_exit("Default credentials are unavailable!")
def get_service_address(variables, host_var, port_var, service):
    """Extract the (host, port) pair for a service from its variables dict.

    Exits the process when either variable is missing.
    """
    missing = "Cannot find any variable matching {} for {}"

    host = variables.get(host_var)
    if host is None:
        print_and_exit(missing, host_var, service)

    port = variables.get(port_var)
    if port is None:
        print_and_exit(missing, port_var, service)

    log.info("Connecting to {} ({}:{})...", service, host, port)
    return host, int(port)
def mywait() -> None:
    """
    Wait for a service on his host:port configuration
    This check is merely based on a socket connection
    """
    for name, variables in Connector.services.items():

        if name in ("smtp", "ftp"):
            log.info("Service {} is enabled but not tested at startup time", name)
            continue

        if name != "celery":
            host, port = get_service_address(variables, "host", "port", name)
            if host != "nohost":
                wait_socket(host, port, name)
            continue

        # Celery: wait for both the broker and the backend services
        broker = variables.get("broker_service", "N/A")
        if broker == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")
        elif broker == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")
        else:
            print_and_exit("Invalid celery broker: {}", broker)  # pragma: no cover

        label = f"{broker.lower()} as celery broker"
        host, port = get_service_address(service_vars, "host", "port", label)
        wait_socket(host, port, label)

        backend = variables.get("backend_service", "N/a")
        if backend == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")
        elif backend == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")
        else:
            print_and_exit("Invalid celery backend: {}", backend)  # pragma: no cover

        label = f"{backend.lower()} as celery backend"
        host, port = get_service_address(service_vars, "host", "port", label)
        wait_socket(host, port, label)
def mywait():
    """
    Wait for a service on his host:port configuration
    basing the check on a socket connection.
    """
    for name, variables in Connector.services.items():

        if name == "smtp":
            continue

        if name != "celery":
            host, port = get_service_address(variables, "host", "port", name)
            wait_socket(host, port, name)
            continue

        # Celery requires both its broker and its backend to be up
        broker = variables.get("broker", "N/A")
        if broker == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")
        elif broker == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")
        else:
            print_and_exit("Invalid celery broker: {}", broker)  # pragma: no cover

        label = f"{broker.lower()} as celery broker"
        host, port = get_service_address(service_vars, "host", "port", label)
        wait_socket(host, port, label)

        backend = variables.get("backend", "N/a")
        # Rabbit is no longer used as backend due to the strong limitations
        if backend == "RABBIT":  # pragma: no cover
            service_vars = Env.load_variables_group(prefix="rabbitmq")
        elif backend == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")
        elif backend == "MONGODB":
            service_vars = Env.load_variables_group(prefix="mongo")
        else:
            print_and_exit(
                "Invalid celery backend: {}", backend
            )  # pragma: no cover

        label = f"{backend.lower()} as celery backend"
        host, port = get_service_address(service_vars, "host", "port", label)
        wait_socket(host, port, label)
def init_app(app: Flask, worker_mode: bool = False) -> None:
    """Bind the Flask app to Connector and initialize the authentication module."""
    Connector.app = app

    # Nothing else to do when authentication is disabled
    if Connector.authentication_service == NO_AUTH:
        return

    if (
        Connector.authentication_service not in Connector.services
    ):  # pragma: no cover
        print_and_exit(
            "Auth service '{}' is not available", Connector.authentication_service
        )

    auth = Connector.get_authentication_instance()
    auth.module_initialization()
def wrapper(*args: Any, **kwargs: Any) -> Any:
    """Optional authentication: inject the user when a valid token is provided.

    A missing token is accepted (user is None); an invalid token is rejected.
    """
    # Recover the auth object
    auth_type, token = HTTPTokenAuth.get_authorization_token(
        allow_access_token_parameter=allow_access_token_parameter
    )

    # Internal API 'self' reference
    caller = Meta.get_self_reference_from_args(*args)
    if caller is None:  # pragma: no cover
        # An exit here is really really dangerous, but even if
        # get_self_reference_from_args can return None, this case is quite
        # impossible... however with None the server can't continue!
        print_and_exit("Server misconfiguration, self reference can't be None!")

    has_auth = auth_type is not None and auth_type == HTTPAUTH_SCHEME
    if not has_auth or request.method == "OPTIONS":
        # No token sent (or CORS preflight): proceed unauthenticated
        kwargs["user"] = None
        return func(*args, **kwargs)

    # valid, token, jti, user
    valid, token, _, user = caller.auth.verify_token(token)

    # Check authentication. Optional authentication is valid if:
    # 1) token is missing
    # 2) token is valid
    # Invalid tokens are rejected
    if not valid:
        # Clear TCP receive buffer of any pending data
        _ = request.data
        # Mimic the response from a normal endpoint
        # To use the same standards
        # log.info("Invalid token received '{}'", token)
        log.debug("Invalid token received")
        return caller.response(
            "Invalid token received",
            headers=HTTPAUTH_ERR_HEADER,
            code=401,
            allow_html=True,
        )

    caller.authorized_user = user.uuid
    kwargs["user"] = user
    request.environ[TOKEN_VALIDATED_KEY] = True
    return func(*args, **kwargs)
def verify(services):
    """Verify connected service"""
    if not services:
        log.warning("Empty list of services, nothing to be verified.")
        log.info("Provide list of services by using --services option")

    for service in services:
        if not Connector.check_availability(service):
            print_and_exit("Service {} not detected", service)

        log.info("Verifying service: {}", service)
        variables = Connector.services.get(service, {})
        host, port = get_service_address(variables, "host", "port", service)
        wait_socket(host, port, service)

    log.info("Completed successfully")
def load_roles() -> None:
    """Load role definitions from the project configuration.

    Populates BaseAuthentication.roles_data (role name -> description),
    default_role, and roles (every non-disabled role name).
    Exits when no roles or no default role are configured.
    """
    BaseAuthentication.roles_data = get_project_configuration(
        "variables.roles"
    ).copy()
    if not BaseAuthentication.roles_data:  # pragma: no cover
        print_and_exit("No roles configured")

    # pop() with a fallback: a missing "default" key is reported by the
    # check below instead of raising an unhandled KeyError here
    BaseAuthentication.default_role = BaseAuthentication.roles_data.pop("default", "")

    BaseAuthentication.roles = []
    for role, description in BaseAuthentication.roles_data.items():
        if description != ROLE_DISABLED:
            BaseAuthentication.roles.append(role)

    if not BaseAuthentication.default_role:  # pragma: no cover
        print_and_exit("Default role not available!")
def verify(service: str) -> None:
    """Verify if a service is connected"""
    if not Connector.check_availability(service):
        print_and_exit("Service {} not detected", service)

    log.info("Verifying service: {}", service)
    variables = Connector.services.get(service, {})

    host, port = get_service_address(variables, "host", "port", service)
    if host != "nohost":
        wait_socket(host, port, service)

    connector_module = Connector.get_module(service, BACKEND_PACKAGE)
    if not connector_module:  # pragma: no cover
        print_and_exit("Connector {} not detected", service)

    instance = connector_module.get_instance()
    log.info(
        "{} successfully authenticated on {}",
        service,
        instance.variables.get("host", service),
    )
def load_configuration(self) -> Dict[str, Any]:
    """Read the project configuration from the configured folders."""
    # Reading configuration
    folders = Env.load_variables_group(prefix="project_confs")

    defaults_path = Path(folders.get("defaults_path", CONF_PATH))
    base_path = Path(folders.get("base_path", CONF_PATH))
    projects_path = Path(folders.get("projects_path", CONF_PATH))
    submodules_path = Path(folders.get("submodules_path", CONF_PATH))

    try:
        configuration, self._extended_project, _ = read_configuration(
            default_file_path=defaults_path,
            base_project_path=base_path,
            projects_path=projects_path,
            submodules_path=submodules_path,
        )
    except AttributeError as e:  # pragma: no cover
        print_and_exit(str(e))

    return configuration
def tests(wait, core, file, folder, destroy):  # pragma: no cover
    """Compute tests and coverage"""
    if wait:
        while initializing():
            log.debug("Waiting services initialization")
            time.sleep(5)
        mywait()

    # --core, --file and --folder are mutually exclusive
    selected = sum(
        1 for flag in (core, file is not None, folder is not None) if flag
    )
    if selected > 1:
        print_and_exit(
            "Please specify only one option between --core, --file and --folder"
        )

    parameters = ["tests/tests.sh"]
    parameters.append(current_package if core else CUSTOM_PACKAGE)

    if file is not None:
        # Can't be enabled due to mistral stuck at py38
        # file = file.removeprefix("tests/")
        if file.startswith("tests/"):
            file = file[6:]
        if not os.path.isfile(os.path.join("tests", file)):
            print_and_exit("File not found: {}", file)
        parameters.append(file)
    elif folder is not None:
        if not os.path.isdir(os.path.join("tests", folder)):
            print_and_exit("Folder not found: {}", folder)
        parameters.append(folder)

    os.environ["TEST_CORE_ENABLED"] = str(core)

    # In prod mode tests are execute with the server running.
    # Destroy test fails with alchemy due to db locks
    if destroy and not PRODUCTION:
        os.environ["TEST_DESTROY_MODE"] = "1"

    try:
        log.info("Running tests... this may take some time")
        log.debug("Executing: {}", parameters)
        from plumbum import local

        command = local["bash"]
        command(parameters, stdout=sys.stdout, stderr=sys.stderr)
        sys.exit(0)
    except Exception as e:
        log.error(e)
        sys.exit(1)
def skip_endpoint(depends_on):
    """Evaluate depends_on env conditions; return (skip, failing_dependency)."""
    for var in depends_on:
        tokens = var.strip().split(" ")

        if len(tokens) == 1:
            negate = False
            dependency = tokens.pop()
        elif len(tokens) == 2:
            neg, dependency = tokens
            negate = neg.lower() == "not"
        else:  # pragma: no cover
            print_and_exit("Wrong depends_on parameter: {}", var)

        satisfied = Env.get_bool(dependency)
        if negate:
            satisfied = not satisfied

        # Skip if not meeting the requirements of the dependency
        if not satisfied:
            return True, dependency

    return False, None
def decorator(func: EndpointFunction) -> EndpointFunction:
    """Attach OpenAPI specs and the endpoint URI to the decorated function."""
    specs: Dict[str, Any] = {
        "summary": summary,
        "description": description,
    }

    specs_responses: Dict[str, Dict[str, str]] = {}
    if responses:
        for code, message in responses.items():
            specs_responses[str(code)] = {"description": message}
    specs["responses"] = specs_responses

    normalized_path = path if path.startswith("/") else f"/{path}"
    if not normalized_path.startswith(API_URL) and not normalized_path.startswith(
        AUTH_URL
    ):
        normalized_path = f"{API_URL}{normalized_path}"

    if hasattr(func, "uri"):  # pragma: no cover
        print_and_exit(
            "Unsupported multiple endpoint mapping found: {}, {}",
            getattr(func, "uri"),
            normalized_path,
        )
    setattr(func, "uri", normalized_path)

    inject_apispec_docs(func, specs, None)

    @wraps(func)
    def wrapper(self: Any, *args: Any, **kwargs: Any) -> EndpointFunction:
        return cast(EndpointFunction, func(self, *args, **kwargs))

    return cast(EndpointFunction, wrapper)
def launch():  # pragma: no cover
    """Launch the RAPyDo-based HTTP API server"""
    mywait()

    args = [
        "run",
        "--host",
        BIND_INTERFACE,
        "--port",
        # Default to 8080: a missing FLASK_PORT would otherwise inject None
        # into the CLI arguments (the sibling launch() uses the same default)
        os.getenv("FLASK_PORT", "8080"),
        "--reload",
        "--no-debugger",
        "--eager-loading",
        "--with-threads",
    ]

    if initializing():
        print_and_exit(
            "Please wait few more seconds: initialization is still in progress"
        )
    else:
        main(args)

    log.warning("Server shutdown")
def load_roles() -> None:
    """Load roles, descriptions and the default role from mem.configuration."""
    no_roles: Dict[str, str] = {}

    BaseAuthentication.roles_data = glom(
        mem.configuration, "variables.roles", default=no_roles
    ).copy()
    if not BaseAuthentication.roles_data:  # pragma: no cover
        print_and_exit("No roles configured")

    BaseAuthentication.default_role = BaseAuthentication.roles_data.pop("default", "")

    BaseAuthentication.role_descriptions = glom(
        mem.configuration, "variables.roles_descriptions", default=no_roles
    ).copy()

    if not BaseAuthentication.default_role:  # pragma: no cover
        print_and_exit("Default role not available!")

    # Every role not explicitly disabled is exposed
    BaseAuthentication.roles = [
        role
        for role, description in BaseAuthentication.roles_data.items()
        if description != ROLE_DISABLED
    ]
def read_configuration(
    default_file_path: Path,
    base_project_path: Path,
    projects_path: Path,
    submodules_path: Path,
) -> Tuple[ConfigurationType, Optional[str], Optional[Path]]:
    """
    Read default configuration
    """
    custom_configuration = load_yaml_file(
        base_project_path.joinpath(PROJECT_CONF_FILENAME)
    )

    # Verify custom project configuration
    project = custom_configuration.get("project")
    # Can't be tested because it is included in default configuration
    if project is None:  # pragma: no cover
        raise AttributeError("Missing project configuration")

    base_configuration = load_yaml_file(
        default_file_path.joinpath(PROJECTS_DEFAULTS_FILE)
    )

    extended_project = project.get("extends")
    if extended_project is None:
        # Mix default and custom configuration
        return mix(base_configuration, custom_configuration), None, None

    extends_from = project.get("extends_from", "projects")
    if extends_from == "projects":
        extend_path = projects_path
    elif extends_from.startswith("submodules/"):  # pragma: no cover
        repository_name = (extends_from.split("/")[1]).strip()
        if repository_name == "":
            print_and_exit("Invalid repository name in extends_from, name is empty")
        extend_path = submodules_path
    else:  # pragma: no cover
        suggest = "Expected values: 'projects' or 'submodules/${REPOSITORY_NAME}'"
        print_and_exit(
            "Invalid extends_from parameter: {}.\n{}", extends_from, suggest
        )

    if not extend_path.exists():  # pragma: no cover
        print_and_exit("From project not found: {}", str(extend_path))

    extend_file = Path(f"extended_{PROJECT_CONF_FILENAME}")
    extended_configuration = load_yaml_file(extend_path.joinpath(extend_file))

    merged = mix(base_configuration, extended_configuration)
    return mix(merged, custom_configuration), extended_project, extend_path
def wrapper(*args: Any, **kwargs: Any) -> Any:
    """Enforce token authentication (and role checks) before calling the endpoint.

    Rejects requests with missing credentials or invalid tokens with a 401;
    OPTIONS requests skip token verification entirely.
    """
    # Recover the auth object
    auth_type, token = HTTPTokenAuth.get_authorization_token(
        allow_access_token_parameter=allow_access_token_parameter)
    # Internal API 'self' reference
    caller = Meta.get_self_reference_from_args(*args)
    if caller is None:  # pragma: no cover
        # An exit here is really really dangerous, but even if
        # get_self_reference_from_args can return None, this case is quite
        # impossible... however with None the server can't continue!
        print_and_exit(
            "Server misconfiguration, self reference can't be None!"
        )
    if auth_type is None or auth_type != HTTPAUTH_SCHEME:
        # Wrong authentication string
        msg = (
            "Missing credentials in headers"
            f", e.g. {HTTPAUTH_AUTH_FIELD}: '{HTTPAUTH_SCHEME} TOKEN'"
        )
        log.debug("Unauthorized request: missing credentials")
        return caller.response(msg,
                               code=401,
                               headers=HTTPAUTH_ERR_HEADER,
                               allow_html=True)
    # Handling OPTIONS forwarded to our application:
    # ignore headers and let go, avoid unwanted interactions with CORS
    if request.method != "OPTIONS":
        # valid, token, jti, user
        valid, token, _, user = caller.auth.verify_token(token)
        # Check authentication
        if not valid:
            # Clear TCP receive buffer of any pending data
            _ = request.data
            # Mimic the response from a normal endpoint
            # To use the same standards
            # log.info("Invalid token received '{}'", token)
            log.debug("Invalid token received")
            return caller.response(
                "Invalid token received",
                headers=HTTPAUTH_ERR_HEADER,
                code=401,
                allow_html=True,
            )
        request.environ[TOKEN_VALIDATED_KEY] = True
        # Check roles
        if not caller.auth.verify_roles(
                user, roles, required_roles=required_roles):
            log.info("Unauthorized request: missing privileges.")
            return caller.response(
                "You are not authorized: missing privileges",
                code=401,
                allow_html=True,
            )
        # Token and roles verified: expose the user to the endpoint
        caller.authorized_user = user.uuid
        kwargs["user"] = user
    return func(*args, **kwargs)
def import_secret(cls, abs_filename: str) -> None:
    """Load the JWT signing secret from file into cls.JWT_SECRET.

    Exits the process when the file cannot be read.
    """
    try:
        # with-statement closes the handle even on a failed read
        # (the original open(...).read() leaked the file object)
        with open(abs_filename, "rb") as secret_file:
            cls.JWT_SECRET = secret_file.read()
    except OSError:  # pragma: no cover
        print_and_exit("Jwt secret file {} not found", abs_filename)
def create_app(
    name: str = __name__,
    mode: ServerModes = ServerModes.NORMAL,
    options: Optional[Dict[str, bool]] = None,
) -> Flask:
    """Create the server instance for the Flask application.

    Builds the Flask app, configures CORS (outside production), loads the
    project configuration and the endpoints, registers the REST resources
    and the OpenAPI specs, and wires error/response handlers.
    mode selects NORMAL serving, WORKER, or the INIT/DESTROY maintenance paths.
    """
    if PRODUCTION and TESTING and not FORCE_PRODUCTION_TESTS:  # pragma: no cover
        print_and_exit("Unable to execute tests in production")

    # TERM is not catched by Flask
    # https://github.com/docker/compose/issues/4199#issuecomment-426109482
    # signal.signal(signal.SIGTERM, teardown_handler)
    # SIGINT is registered as STOPSIGNAL in Dockerfile
    signal.signal(signal.SIGINT, teardown_handler)

    # Flask app instance
    # template_folder = template dir for output in HTML
    microservice = Flask(
        name, template_folder=os.path.join(ABS_RESTAPI_PATH, "templates")
    )

    # CORS
    if not PRODUCTION:
        cors = CORS(
            allow_headers=[
                "Content-Type",
                "Authorization",
                "X-Requested-With",
                "x-upload-content-length",
                "x-upload-content-type",
                "content-range",
            ],
            supports_credentials=["true"],
            methods=["GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"],
        )
        cors.init_app(microservice)
        log.debug("CORS Injected")

    # Flask configuration from config file
    microservice.config.from_object(config)
    log.debug("Flask app configured")

    if PRODUCTION:
        log.info("Production server mode is ON")

    endpoints_loader = EndpointsLoader()
    mem.configuration = endpoints_loader.load_configuration()

    # Project-provided hooks: Initializer runs at boot, Customizer shapes models
    mem.initializer = Meta.get_class("initialization", "Initializer")
    if not mem.initializer:  # pragma: no cover
        print_and_exit("Invalid Initializer class")

    mem.customizer = Meta.get_instance("customization", "Customizer")
    if not mem.customizer:  # pragma: no cover
        print_and_exit("Invalid Customizer class")

    if not isinstance(mem.customizer, BaseCustomizer):  # pragma: no cover
        print_and_exit("Invalid Customizer class, it should inherit BaseCustomizer")

    Connector.init_app(app=microservice, worker_mode=(mode == ServerModes.WORKER))

    # Initialize reading of all files
    mem.geo_reader = geolite2.reader()
    # when to close??
    # geolite2.close()

    if mode == ServerModes.INIT:
        Connector.project_init(options=options)

    if mode == ServerModes.DESTROY:
        Connector.project_clean()

    # Restful plugin with endpoint mapping (skipped in INIT|DESTROY|WORKER modes)
    if mode == ServerModes.NORMAL:

        logging.getLogger("werkzeug").setLevel(logging.ERROR)

        # ignore warning messages from apispec
        warnings.filterwarnings(
            "ignore", message="Multiple schemas resolved to the name "
        )

        mem.cache = Cache.get_instance(microservice)

        endpoints_loader.load_endpoints()
        mem.authenticated_endpoints = endpoints_loader.authenticated_endpoints
        mem.private_endpoints = endpoints_loader.private_endpoints

        # Triggering automatic mapping of REST endpoints
        rest_api = Api(catch_all_404s=True)

        for endpoint in endpoints_loader.endpoints:
            # Create the restful resource with it;
            # this method is from RESTful plugin
            rest_api.add_resource(endpoint.cls, *endpoint.uris)

        # HERE all endpoints will be registered by using FlaskRestful
        rest_api.init_app(microservice)

        # APISpec configuration
        api_url = get_backend_url()
        scheme, host = api_url.rstrip("/").split("://")

        spec = APISpec(
            title=get_project_configuration(
                "project.title", default="Your application name"
            ),
            version=get_project_configuration("project.version", default="0.0.1"),
            openapi_version="2.0",
            # OpenApi 3 not working with FlaskApiSpec
            # -> Duplicate parameter with name body and location body
            # https://github.com/jmcarp/flask-apispec/issues/170
            # Find other warning like this by searching:
            # **FASTAPI**
            # openapi_version="3.0.2",
            plugins=[MarshmallowPlugin()],
            host=host,
            schemes=[scheme],
            tags=endpoints_loader.tags,
        )

        # OpenAPI 3 changed the definition of the security level.
        # Some changes needed here?
        api_key_scheme = {"type": "apiKey", "in": "header", "name": "Authorization"}
        spec.components.security_scheme("Bearer", api_key_scheme)

        microservice.config.update(
            {
                "APISPEC_SPEC": spec,
                # 'APISPEC_SWAGGER_URL': '/api/swagger',
                "APISPEC_SWAGGER_URL": None,
                # 'APISPEC_SWAGGER_UI_URL': '/api/swagger-ui',
                # Disable Swagger-UI
                "APISPEC_SWAGGER_UI_URL": None,
            }
        )

        mem.docs = FlaskApiSpec(microservice)

        # Clean app routes
        ignore_verbs = {"HEAD", "OPTIONS"}

        for rule in microservice.url_map.iter_rules():

            endpoint = microservice.view_functions[rule.endpoint]
            if not hasattr(endpoint, "view_class"):
                continue

            newmethods = ignore_verbs.copy()
            rulename = str(rule)

            for verb in rule.methods - ignore_verbs:
                method = verb.lower()
                if method in endpoints_loader.uri2methods[rulename]:
                    # remove from flask mapping
                    # to allow 405 response
                    newmethods.add(verb)

            rule.methods = newmethods

        # Register swagger. Note: after method mapping cleaning
        with microservice.app_context():
            for endpoint in endpoints_loader.endpoints:
                try:
                    mem.docs.register(endpoint.cls)
                except TypeError as e:  # pragma: no cover
                    print(e)
                    log.error("Cannot register {}: {}", endpoint.cls.__name__, e)

    # marshmallow errors handler
    microservice.register_error_handler(422, handle_marshmallow_errors)

    # Logging responses
    microservice.after_request(handle_response)

    if SENTRY_URL is not None:  # pragma: no cover

        if PRODUCTION:
            sentry_sdk.init(
                dsn=SENTRY_URL,
                # already catched by handle_marshmallow_errors
                ignore_errors=[werkzeug.exceptions.UnprocessableEntity],
                integrations=[FlaskIntegration()],
            )
            log.info("Enabled Sentry {}", SENTRY_URL)
        else:
            # Could be enabled in print mode
            # sentry_sdk.init(transport=print)
            log.info("Skipping Sentry, only enabled in PRODUCTION mode")

    log.info("Boot completed")

    return microservice
def load_connectors(cls, path: str, module: str, services: Services) -> Services:
    """Scan the connectors folder of a package and register available connectors.

    Loads each connector's environment variables, skips disabled or
    unavailable ones, verifies module compliance, and returns the updated
    services mapping.
    """
    main_folder = os.path.join(path, CONNECTORS_FOLDER)
    if not os.path.isdir(main_folder):
        log.debug("Connectors folder not found: {}", main_folder)
        return services

    for connector in os.listdir(main_folder):
        connector_path = os.path.join(path, CONNECTORS_FOLDER, connector)
        if not os.path.isdir(connector_path):
            continue
        if connector.startswith("_"):
            continue

        # This is the only exception... we should rename sqlalchemy as alchemy
        if connector == "sqlalchemy":
            variables = Env.load_variables_group(prefix="alchemy")
        else:
            variables = Env.load_variables_group(prefix=connector)

        if not Env.to_bool(
            variables.get("enable_connector", True)
        ):  # pragma: no cover
            log.info("{} connector is disabled", connector)
            continue

        # if host is not in variables (like for Celery) do not consider it
        external = False
        if "host" in variables:
            if host := variables.get("host"):
                external = cls.is_external(host)
            # HOST found in variables but empty... never happens during tests
            else:  # pragma: no cover
                variables["enable"] = "0"

        enabled = Env.to_bool(variables.get("enable"))
        available = enabled or external

        if not available:
            continue

        connector_module = Connector.get_module(connector, module)
        connector_class = Connector.get_class(connector_module)

        # Can't test connector misconfiguration...
        if not connector_class:  # pragma: no cover
            log.error("No connector class found in {}/{}", main_folder, connector)
            continue

        try:
            # This is to test the Connector compliance,
            # i.e. to verify instance and get_instance in the connector module
            # and verify that the Connector can be instanced
            connector_module.instance
            connector_module.get_instance
            connector_class()
        except AttributeError as e:  # pragma: no cover
            print_and_exit(e)

        services[connector] = variables

        log.debug("Got class definition for {}", connector_class)

    # BUGFIX: the function is annotated -> Services but previously fell off
    # the end of the loop returning None; return the updated mapping
    return services
def destroy(self) -> None:  # pragma: no cover
    """Abort: subclasses are expected to override this hook to tear down project data."""
    print_and_exit("Missing destroy method in {}", self.__class__.__name__)
def initialize(self) -> None:  # pragma: no cover
    """Abort: subclasses are expected to override this hook to set up project data."""
    print_and_exit("Missing initialize method in {}", self.__class__.__name__)
def load_connectors(cls, path: Path, module: str, services: Services) -> Services:
    """Scan the connectors folder of a package and register available connectors.

    Loads each connector's environment variables, skips disabled or
    unavailable ones (celery is always considered when enabled), verifies
    module compliance, and returns the updated services mapping.
    """
    main_folder = path.joinpath(CONNECTORS_FOLDER)
    if not main_folder.is_dir():
        log.debug("Connectors folder not found: {}", main_folder)
        return services

    for connector in main_folder.iterdir():
        if not connector.is_dir():
            continue

        connector_name = connector.name
        if connector_name.startswith("_"):
            continue

        # This is the only exception... we should rename sqlalchemy as alchemy
        if connector_name == "sqlalchemy":
            variables = Env.load_variables_group(prefix="alchemy")
        else:
            variables = Env.load_variables_group(prefix=connector_name)

        if not Env.to_bool(
            variables.get("enable_connector", True)
        ):  # pragma: no cover
            log.debug("{} connector is disabled", connector_name)
            continue

        external = False
        if "host" in variables:
            if host := variables.get("host"):
                external = cls.is_external(host)
            # HOST found in variables but empty... never happens during tests
            else:  # pragma: no cover
                variables["enable"] = "0"

        enabled = Env.to_bool(variables.get("enable"))

        # Celery is always enabled, if connector is enabled
        # No further check is needed on host/external
        available = enabled or external or connector_name == "celery"

        if not available:
            continue

        connector_module = Connector.get_module(connector_name, module)
        connector_class = Connector.get_class(connector_module)

        # Can't test connector misconfiguration...
        if not connector_class:  # pragma: no cover
            log.error("No connector class found in {}/{}", main_folder, connector)
            continue

        try:
            # This is to test the Connector compliance,
            # i.e. to verify instance and get_instance in the connector module
            # and verify that the Connector can be instanced
            connector_module.instance
            connector_module.get_instance
            connector_class()
        except AttributeError as e:  # pragma: no cover
            print_and_exit(e)

        services[connector_name] = variables

        log.debug("Got class definition for {}", connector_class)

    # BUGFIX: the function is annotated -> Services but previously fell off
    # the end of the loop returning None; return the updated mapping
    return services
def load_endpoints_folder(self, base_dir):
    """Discover endpoint classes under base_dir and build their definitions.

    For every endpoint class and HTTP method: wraps the handler with the
    catch_exceptions decorator, records auth requirements and privacy per
    URI, injects default OpenAPI responses, and appends the result to
    self.endpoints. Also fills authenticated_endpoints, private_endpoints
    and uri2methods keyed by full URI.
    """
    # Walk folders looking for endpoints
    for epclss in self.extract_endpoints(base_dir):

        if epclss.baseuri in BASE_URLS:
            base = epclss.baseuri
        else:
            log.warning("Invalid base {}", epclss.baseuri)
            base = API_URL
        base = base.strip("/")

        # Building endpoint
        endpoint = EndpointElements(
            uris=[],
            methods={},
            cls=epclss,
            tags=epclss.labels,
            base_uri=base,
            private=epclss.private,
        )

        # m = GET|PUT|POST|DELETE|PATCH|...
        for m in epclss.methods:
            # method_fn = get|post|put|delete|patch|...
            method_fn = m.lower()

            # get, post, put, patch, delete functions
            fn = getattr(epclss, method_fn)

            # Adding the catch_exceptions decorator to every endpoint
            decorator = decorators.catch_exceptions()
            setattr(epclss, method_fn, decorator(fn))

            # auth.required injected by the required decorator in bearer.py
            auth_required = fn.__dict__.get("auth.required", False)
            # auth.optional injected by the optional decorator in bearer.py
            auth_optional = fn.__dict__.get("auth.optional", False)

            if not hasattr(fn, "uris"):  # pragma: no cover
                print_and_exit(
                    "Invalid {} endpoint in {}: missing endpoint decorator",
                    method_fn,
                    epclss.__name__,
                )
                continue

            endpoint.methods[method_fn] = fn.uris

            for uri in fn.uris:

                full_uri = f"/{endpoint.base_uri}{uri}"

                self.authenticated_endpoints.setdefault(full_uri, {})
                # method_fn is equivalent to m.lower()
                self.authenticated_endpoints[full_uri].setdefault(
                    method_fn, auth_required)

                # Set default responses
                responses: Dict[str, Dict[str, str]] = {}

                responses.setdefault("400", ERR400)
                if auth_required:
                    responses.setdefault("401", ERR401)
                    responses.setdefault("404", ERR404_AUTH)
                elif auth_optional:
                    responses.setdefault("401", ERR401)
                    responses.setdefault("404", ERR404)
                else:
                    responses.setdefault("404", ERR404)

                # inject _METHOD dictionaries into __apispec__ attribute
                # __apispec__ is normally populated by using @docs decorator
                inject_apispec_docs(fn, {"responses": responses}, epclss.labels)

                # This will be used by server.py.add
                endpoint.uris.append(full_uri)

                self.private_endpoints.setdefault(full_uri, {})
                self.private_endpoints[full_uri].setdefault(
                    method_fn, endpoint.private)

                # Used by server.py to remove unmapped methods
                self.uri2methods.setdefault(full_uri, [])
                self.uri2methods[full_uri].append(method_fn)

                # log.debug("Built definition '{}:{}'", m, full_uri)

        self._used_tags.update(endpoint.tags)
        self.endpoints.append(endpoint)