def _build_values(
    self,
    init_kwargs: Dict[str, Any],
    _env_file: Union[Path, str, None] = None,
    _env_file_encoding: Optional[str] = None,
    _secrets_dir: Union[Path, str, None] = None,
    _azure_keyvault: Union[str, None] = None,
) -> Dict[str, Any]:
    """Collect settings from every source and merge them into one dict.

    Sources are merged with ``deep_update`` so that later sources win:
    Azure Key Vault < secrets files < environment < explicit init kwargs.
    The vault name falls back to the one configured on ``Config`` when
    not passed explicitly.
    """
    vault_name = _azure_keyvault or self.__config__.azure_keyvault
    client = self.__config__.get_azure_client(vault_name)
    keyvault_values = self._build_keyvault(client)
    file_secret_values = self._build_secrets_files(_secrets_dir)
    environ_values = self._build_environ(_env_file, _env_file_encoding)
    return deep_update(keyvault_values, file_secret_values, environ_values, init_kwargs)
def update(self, config=None):
    """Update config with provided settings.

    Parameters
    ----------
    config : string dict or `AnalysisConfig` object
        Configuration settings provided in dict() syntax.
    """
    # Accept either a ready-made config object or a YAML string.
    if isinstance(config, AnalysisConfig):
        other = config
    elif isinstance(config, str):
        other = AnalysisConfig.from_yaml(config)
    else:
        raise TypeError(f"Invalid type: {config}")
    # Merge only the explicitly-set values of both configs, then rebuild.
    merged = deep_update(
        self.dict(exclude_defaults=True), other.dict(exclude_defaults=True)
    )
    return AnalysisConfig(**merged)
def __init__(
    self,
    *,
    env_prefix: str = DEFAULT_ENV_PREFIX,
    env_delimiter: str = "_",
    filepath: Optional[Union[Path, str]] = None,
) -> None:
    """Initialize settings from an optional file, overridden by env vars.

    On validation failure the error is logged as critical and the
    process exits with status 1.
    """
    path = Path(filepath) if filepath is not None else None
    from_file = self._load_file_values(path)
    from_env = self._load_env_values(env_prefix, env_delimiter)
    try:
        # Environment values take precedence over file values.
        super().__init__(**deep_update(from_file, from_env))
    except ValidationError as error:
        logger.critical(error)
        sys.exit(1)
def _set_(self, dct, key, value):
    """Helper method to assign item to a given dictionary.

    Uses `_types` to type-check the value, before assigning.
    Supports partial assignment: if the target field is itself a
    pydantic model and a ``dict`` is assigned, the dict is deep-merged
    into the current value rather than replacing it wholesale.
    """
    assert key not in self._dependencies, "Can't override dependency."
    model = self._types.get(key)
    if model is None:
        # No type information registered — store the raw value.
        dct[key] = value
        return
    field_type = model.__fields__[key].type_
    if issubclass(field_type, pydantic.BaseModel) and isinstance(value, dict):
        # e.g. `settings.foo = {"b": 1}` only updates key "b" of foo.
        merged = deep_update(self[key].dict(), value)
        dct[key] = field_type.parse_obj(merged)
    else:
        # Run the value through the model to coerce/validate it.
        dct[key] = getattr(model.parse_obj({key: value}), key)
def test_deep_update(mapping, updating_mapping, expected_mapping, msg):
    """Parametrized check: deep_update merges nested mappings as expected."""
    result = deep_update(mapping, updating_mapping)
    assert result == expected_mapping, msg
def create_visyn_server(
    *, fast_api_args: Optional[Dict] = None, start_cmd: Optional[str] = None, workspace_config: Optional[Dict] = None
) -> FastAPI:
    """
    Create a new FastAPI instance while ensuring that the configuration and plugins are loaded, extension points are registered, database migrations are executed, ...

    Keyword arguments:
    fast_api_args: Optional dictionary of arguments directly passed to the FastAPI constructor.
    start_cmd: Optional start command for the server, i.e. db-migration exposes commands like `db-migration exec <..> upgrade head`.
    workspace_config: Optional override for the workspace configuration. If nothing is provided `load_workspace_config()` is used instead.
    """
    # FIX: avoid a mutable default argument ({}); normalize None to a fresh dict per call.
    if fast_api_args is None:
        fast_api_args = {}

    from .. import manager
    from ..settings.model import GlobalSettings
    from ..settings.utils import load_workspace_config

    # Load the workspace config.json and initialize the global settings
    workspace_config = workspace_config if isinstance(workspace_config, dict) else load_workspace_config()
    manager.settings = GlobalSettings(**workspace_config)
    logging.config.dictConfig(manager.settings.tdp_core.logging)
    _log = logging.getLogger(__name__)
    _log.info("Workspace settings successfully loaded")

    # Load the initial plugins
    from ..plugin.parser import get_config_from_plugins, load_all_plugins

    plugins = load_all_plugins()
    # With all the plugins, load the corresponding configuration files and create a new model based on the global settings, with all plugin models as sub-models
    [plugin_config_files, plugin_settings_models] = get_config_from_plugins(plugins)
    visyn_server_settings = create_model("VisynServerSettings", __base__=GlobalSettings, **plugin_settings_models)
    # Patch the global settings by instantiating the new settings model with the global config, all config.json(s), and pydantic models
    manager.settings = visyn_server_settings(**deep_update(*plugin_config_files, workspace_config))
    _log.info("All settings successfully loaded")

    app = FastAPI(
        debug=manager.settings.is_development_mode,
        title="Visyn Server",
        # TODO: Extract version from package.json
        version="1.0.0",
        docs_url="/api/docs",
        openapi_url="/api/openapi.json",
        redoc_url="/api/redoc",
        **fast_api_args,
    )

    from ..middleware.exception_handler_middleware import ExceptionHandlerMiddleware
    from ..middleware.request_context_middleware import RequestContextMiddleware

    # TODO: For some reason, a @app.exception_handler(Exception) is not called here. We use a middleware instead.
    app.add_middleware(ExceptionHandlerMiddleware)

    # Store all globals also in app.state.<manager> to allow access in FastAPI routes via request.app.state.<manager>.
    app.state.settings = manager.settings

    # Initialize global managers.
    from ..plugin.registry import Registry

    app.state.registry = manager.registry = Registry()
    manager.registry.init_app(app, plugins)
    _log.info("Plugin registry successfully initialized")

    from ..dbmanager import DBManager

    app.state.db = manager.db = DBManager()
    manager.db.init_app(app)

    from ..dbmigration.manager import DBMigrationManager

    app.state.db_migration = manager.db_migration = DBMigrationManager()
    manager.db_migration.init_app(app, manager.registry.list("tdp-sql-database-migration"))

    from ..security.manager import create_security_manager

    app.state.security = manager.security = create_security_manager()
    manager.security.init_app(app)

    from ..id_mapping.manager import create_id_mapping_manager

    app.state.id_mapping = manager.id_mapping = create_id_mapping_manager()

    # TODO: Allow custom command routine (i.e. for db-migrations)
    from .cmd import parse_command_string

    alternative_start_command = parse_command_string(start_cmd)
    if alternative_start_command:
        _log.info(f"Received start command: {start_cmd}")
        alternative_start_command()
        _log.info("Successfully executed command, exiting server...")
        # TODO: How to properly exit here? Should a command support the "continuation" of the server, i.e. by returning True?
        sys.exit(0)

    # Load all namespace plugins as WSGIMiddleware plugins
    from .utils import init_legacy_app, load_after_server_started_hooks

    namespace_plugins = manager.registry.list("namespace")
    _log.info(f"Registering {len(namespace_plugins)} legacy namespaces via WSGIMiddleware")
    for p in namespace_plugins:
        _log.info(f"Registering legacy namespace: {p.namespace}")
        app.mount(p.namespace, WSGIMiddleware(init_legacy_app(p.load().factory())))

    # Load all FastAPI apis
    router_plugins = manager.registry.list("fastapi_router")
    _log.info(f"Registering {len(router_plugins)} API-routers")
    for p in router_plugins:
        _log.info(f"Registering router: {p.id}")
        app.include_router(p.load().factory())

    # load `after_server_started` extension points which are run immediately after server started,
    # so all plugins should have been loaded at this point of time
    # the hooks are run in a separate (single) thread to not block the main execution of the server
    # TODO: Use FastAPI mechanism for that
    t = threading.Thread(target=load_after_server_started_hooks)
    t.daemon = True
    t.start()

    # TODO: Check mainapp.py what it does and transfer them here. Currently, we cannot mount a flask app at root, such that the flask app is now mounted at /app/
    from .mainapp import build_info, health

    # Call init_app callback for every plugin
    for p in plugins:
        p.plugin.init_app(app)

    # Add middleware to access Request "outside"
    app.add_middleware(RequestContextMiddleware)

    # TODO: Move up?
    app.add_api_route("/health", health)
    app.add_api_route("/api/buildInfo.json", build_info)

    return app
def test_deep_update_is_not_mutating():
    """deep_update must return a new mapping and leave its input untouched."""
    original = {'key': {'inner_key': {'deep_key': 1}}}
    merged = deep_update(original, {'key': {'inner_key': {'other_deep_key': 1}}})
    # The merge result contains both deep keys ...
    assert merged == {'key': {'inner_key': {'deep_key': 1, 'other_deep_key': 1}}}
    # ... while the first argument is unchanged.
    assert original == {'key': {'inner_key': {'deep_key': 1}}}
def create_resource_claim(self, logger, workshop):
    """Create a Poolboy ResourceClaim for this WorkshopProvision.

    Builds the claim definition from the catalog item, applies workshop
    parameters (as annotations and/or job_vars on the claimed resources),
    creates the custom object, and — when the workshop has a service URL —
    patches the claim with its service URL annotation.

    Raises kopf.TemporaryError (retry after 60s) when the catalog item
    does not exist yet; other API errors propagate unchanged.
    """
    logger.debug(
        f"Creating ResourceClaim for {self.name} in namespace {self.namespace}"
    )
    try:
        catalog_item = CatalogItem.get(
            name=self.catalog_item_name,
            namespace=self.catalog_item_namespace,
        )
    except kubernetes.client.rest.ApiException as e:
        if e.status == 404:
            raise kopf.TemporaryError(
                f"CatalogItem {self.catalog_item_name} was not found in namespace {self.catalog_item_namespace}.",
                delay=60)
        else:
            raise

    resource_claim_definition = {
        "apiVersion": f"{poolboy_domain}/{poolboy_api_version}",
        "kind": "ResourceClaim",
        "metadata": {
            "annotations": {
                catalog_display_name_annotation: catalog_item.catalog_display_name,
                catalog_item_display_name_annotation: catalog_item.display_name,
                notifier_annotation: "disable",
            },
            "generateName": f"{catalog_item.name}-",
            "labels": {
                catalog_item_name_label: catalog_item.name,
                catalog_item_namespace_label: catalog_item.namespace,
                workshop_label: workshop.name,
                workshop_provision_label: self.name,
            },
            "namespace": f"{self.namespace}",
            "ownerReferences": [{
                "apiVersion": f"{babylon_domain}/{babylon_api_version}",
                "controller": True,
                "kind": "WorkshopProvision",
                "name": self.name,
                "uid": self.uid,
            }]
        },
        # deepcopy so later per-parameter deep_update calls cannot mutate
        # the shared catalog item resource templates.
        "spec": {
            "resources": deepcopy(catalog_item.resources)
        }
    }

    if workshop.requester:
        resource_claim_definition['metadata']['annotations'][
            requester_annotation] = workshop.requester

    if catalog_item.lab_ui_type:
        resource_claim_definition['metadata']['labels'][
            lab_ui_label] = catalog_item.lab_ui_type

    for catalog_item_parameter in catalog_item.parameters:
        # Explicit workshop parameter wins over the catalog item default.
        value = self.parameters.get(
            catalog_item_parameter.name, catalog_item_parameter.default)
        # FIX: identity comparison (`is None`) instead of `== None` (PEP 8 E711).
        if value is None and not catalog_item_parameter.required:
            continue
        if catalog_item_parameter.annotation:
            resource_claim_definition['metadata']['annotations'][
                catalog_item_parameter.annotation] = str(value)
        if catalog_item_parameter.variable:
            # Inject the value as an Ansible job var into each targeted resource.
            for resource_index in catalog_item_parameter.resource_indexes:
                resource_claim_definition['spec']['resources'][
                    resource_index] = deep_update(
                        resource_claim_definition['spec']['resources']
                        [resource_index], {
                            'template': {
                                'spec': {
                                    'vars': {
                                        'job_vars': {
                                            catalog_item_parameter.variable: value
                                        }
                                    }
                                }
                            }
                        })

    resource_claim_definition = custom_objects_api.create_namespaced_custom_object(
        poolboy_domain, poolboy_api_version, self.namespace, 'resourceclaims',
        resource_claim_definition)

    if workshop.service_url:
        # Reuse only the scheme+host of the workshop service URL.
        url_prefix = re.sub(r'^(https?://[^/]+).*', r'\1', workshop.service_url)
        name = resource_claim_definition['metadata']['name']
        resource_claim_definition = custom_objects_api.patch_namespaced_custom_object(
            poolboy_domain, poolboy_api_version, self.namespace,
            'resourceclaims', name, {
                "metadata": {
                    "annotations": {
                        url_annotation:
                        f"{url_prefix}/services/{self.namespace}/{name}"
                    }
                }
            })

    resource_claim = ResourceClaim(resource_claim_definition)
    logger.info(
        f"Created ResourceClaim for {resource_claim.name} "
        f"for WorkshopProvision {self.name} in namespace {self.namespace}")
    return resource_claim
def update_config(self, partial_config: dict) -> FtxArbitrageStrategyConfig:
    """Deep-merge *partial_config* into the current strategy config.

    Returns the updated config via ``get_config()``.
    """
    current = self.config.dict()
    merged = deep_update(current, partial_config)
    self.config = FtxArbitrageStrategyConfig(**merged)
    return self.get_config()