def _resolve_field_for_model(self, model: "Type[Model]", table: Table, field: str, *default_values: Any) -> dict:
    """
    Resolve a (possibly ``__``-nested) field path starting at *model* into
    the concrete column that the database function should operate on.

    :param model: model class the path starts at.
    :param table: (possibly aliased) table of *model*.
    :param field: field path, segments separated by ``__``.
    :param default_values: extra arguments forwarded to the function field.
    :return: dict with "joins" (list of ``(table, field_name, relational_field)``
        triples needed to reach the column) and "field" (the built function field).
    :raises ConfigurationError: When a path segment is not a fetchable relation.
    """
    joins = []
    fields = field.split("__")
    # Walk every segment except the last, following relations and collecting
    # the join needed at each hop; `model` and `table` advance as we go.
    for iter_field in fields[:-1]:
        if iter_field not in model._meta.fetch_fields:
            raise ConfigurationError(f"{field} not resolvable")
        related_field = cast(RelationalField, model._meta.fields_map[iter_field])
        joins.append((table, iter_field, related_field))
        model = related_field.model_class
        related_table: Table = related_field.model_class._meta.basetable
        if isinstance(related_field, ForeignKeyFieldInstance):
            # Only FK's can be to same table, so we only auto-alias FK join tables
            related_table = related_table.as_(
                f"{table.get_table_name()}__{iter_field}")
        table = related_table
    last_field = fields[-1]
    if last_field in model._meta.fetch_fields:
        # Path ends on a relation: operate on the related table's pk column.
        related_field = cast(RelationalField, model._meta.fields_map[last_field])
        related_field_meta = related_field.model_class._meta
        joins.append((table, last_field, related_field))
        related_table = related_field_meta.basetable
        if isinstance(related_field, BackwardFKRelation):
            if table == related_table:
                # Self-referencing backward FK: alias to disambiguate the join.
                related_table = related_table.as_(
                    f"{table.get_table_name()}__{last_field}")
        field = related_table[related_field_meta.db_pk_field]
    else:
        # Path ends on a plain field: map it to its db column on `table`.
        field_object = model._meta.fields_map[last_field]
        if field_object.source_field:
            field = table[field_object.source_field]
        else:
            field = table[last_field]
        if self.populate_field_object:
            self.field_object = model._meta.fields_map.get(
                last_field, None)
            if self.field_object:  # pragma: nobranch
                # Apply the field's "function_cast" hook for the current dialect,
                # if one is defined.
                func = self.field_object.get_for_dialect(
                    model._meta.db.capabilities.dialect, "function_cast")
                if func:
                    field = func(self.field_object, field)
    return {
        "joins": joins,
        "field": self._get_function_field(field, *default_values)
    }
def split_reference(reference: str) -> Tuple[str, str]:
    """
    Test if a reference follows the official ``<appname>.<modelname>``
    naming convention.

    :param reference: model reference string to validate.
    :return: ``(app_name, model_name)`` tuple if successful.
    :raises ConfigurationError: With a hopefully helpful message when the
        reference is malformed.
    """
    # A single split covers both "no dot at all" and "too many dots";
    # the original performed a redundant `"." not in reference` pre-check
    # and carried typos in its messages ("ist not", "reference Bad Reference").
    items = reference.split(".")
    if len(items) != 2:
        raise ConfigurationError(
            ("'%s' is not a valid model reference."
             " Should be something like <appname>.<modelname>.") % reference)
    return (items[0], items[1])
async def generate_schemas(cls) -> None:
    """
    Generate schemas for all models registered via the ``.init()`` method.

    Fails if the schemas already exist, so this is not recommended as part
    of the normal application workflow.

    :raises ConfigurationError: When ``.init()`` has not been called yet.
    """
    if not cls._inited:
        raise ConfigurationError(
            'You have to call .init() first before generating schemas')
    for conn in cls._connections.values():
        await generate_schema_for_client(conn)
def __init__(self, pool=None, connection=None):
    """Wrap either a connection pool or a single connection — never both."""
    if pool and connection:
        raise ConfigurationError('You must pass either connection or pool')
    self._pool = pool
    self._connection = connection
    self.log = logging.getLogger('db_client')
    self.single_connection = True
    # Dynamically compose a wrapper class combining the single-connection
    # behaviour with this concrete client class.
    bases = (SingleConnectionWrapper, self.__class__)
    self._single_connection_class = type('SingleConnectionWrapper', bases, {})
    self._transaction_class = self.__class__
def _discover_client_class(cls, engine: str) -> BaseDBAsyncClient:
    """
    Import *engine* and return the db client class it exposes.

    Import errors deliberately bubble up for transparency.

    :raises ConfigurationError: When the module lacks a ``client_class``.
    """
    engine_module = importlib.import_module(engine)
    try:
        return engine_module.client_class  # type: ignore
    except AttributeError:
        raise ConfigurationError(
            'Backend for engine "{}" does not implement db client'.format(
                engine))
def __init__(self, pk: bool = False, unique: bool = False, index: bool = False, **kwargs: Any) -> None:
    """
    TextField constructor.

    :param pk: Deprecated — TextField primary keys emit a DeprecationWarning.
    :param unique: Not supported; always raises.
    :param index: Not supported; always raises.
    :raises ConfigurationError: When ``unique`` or ``index`` is requested.
    """
    if pk:
        warnings.warn(
            "TextField as a PrimaryKey is Deprecated, use CharField instead",
            DeprecationWarning,
            stacklevel=2,
        )
    if unique:
        # Fix: messages had no placeholders, so the f-prefixes were dead (F541).
        raise ConfigurationError(
            "TextField doesn't support unique indexes, consider CharField or another strategy"
        )
    if index:
        raise ConfigurationError("TextField can't be indexed, consider CharField")
    # `unique`/`index` are intentionally not forwarded: they are rejected above.
    super().__init__(pk=pk, **kwargs)
def __init__(
    self,
    model_name: str,
    related_name: Union[Optional[str], Literal[False]] = None,
    on_delete: str = CASCADE,
    **kwargs: Any,
) -> None:
    """Foreign-key relation to ``model_name`` (must be "app.Model" format)."""
    super().__init__(None, **kwargs)  # type: ignore
    if model_name.count(".") != 1:
        raise ConfigurationError(
            'Foreign key accepts model name in format "app.Model"')
    self.model_name = model_name
    self.related_name = related_name
    if on_delete not in (CASCADE, RESTRICT, SET_NULL):
        raise ConfigurationError(
            "on_delete can only be CASCADE, RESTRICT or SET_NULL")
    if on_delete == SET_NULL and not kwargs.get("null"):
        raise ConfigurationError(
            "If on_delete is SET_NULL, then field must have null=True set")
    self.on_delete = on_delete
def expand_db_url(db_url: str, testing: bool = False) -> dict:
    """
    Parse a DB URL into an ``{'engine': ..., 'credentials': ...}`` dict.

    :param db_url: URL of the form ``scheme://user:pass@host:port/path?opts``.
    :param testing: When true, expands an escaped ``\\{...\\}`` placeholder in
        the path with a fresh uuid hex.
    :raises ConfigurationError: On unknown scheme, missing path, or bad port.
    """
    url = urlparse.urlparse(db_url)
    if url.scheme not in DB_LOOKUP:
        raise ConfigurationError('Unknown DB scheme: {}'.format(url.scheme))
    db = DB_LOOKUP[url.scheme]
    if db.get('skip_first_char', True):
        path = url.path[1:]
    else:
        path = url.netloc + url.path
    if not path:
        raise ConfigurationError('No path specified for DB_URL')
    # Keep only the last value of each repeated query parameter.
    params = {key: vals[-1]
              for key, vals in urlparse.parse_qs(url.query).items()}  # type: dict
    if testing:
        path = path.replace('\\{', '{').replace('\\}', '}')
        path = path.format(uuid.uuid4().hex)
    vmap = dict(db['vmap'])  # type: dict
    params[vmap['path']] = path
    if vmap.get('hostname'):
        params[vmap['hostname']] = str(url.hostname or '')
    try:
        if vmap.get('port'):
            params[vmap['port']] = str(url.port or '')
    except ValueError:
        raise ConfigurationError('Port is not an integer')
    if vmap.get('username'):
        params[vmap['username']] = str(url.username or '')
    if vmap.get('password'):
        params[vmap['password']] = str(url.password or '')
    return {
        'engine': db['engine'],
        'credentials': params,
    }
def add_field(self, name: str, field: Field):
    """Register *field* under *name* and wire up its db-column bookkeeping."""
    if name in self.fields_map:
        raise ConfigurationError(f"Field {name} already present in meta")
    field.model_field_name = name
    field.model = self._model
    self.fields_map[name] = field
    if not field.has_db_column:
        return
    # Default the column name to the field name when none was given.
    if not field.db_column:
        field.db_column = name
    self.field_to_db_column_name_map[name] = field.db_column
def get_app_model(self, app_label: str, model_name: str):
    """
    Return the model registered as *model_name* within *app_label*.

    :param app_label: the app label
    :param model_name: the model name
    :return: the model object
    :raises ConfigurationError: When the app is not registered, or the model
        is not registered within the app.
    """
    models = self._app_models_map.get(app_label)
    if models is None:
        raise ConfigurationError(
            f"No app with name '{app_label}' registered.")
    if model_name not in models:
        raise ConfigurationError(
            f"No model with name '{model_name}' registered in app '{app_label}'."
        )
    return models[model_name]
async def _drop_databases(cls) -> None:
    """
    Try to drop every database from the config passed to ``.init()``.

    Normally should be used only for testing purposes.

    :raises ConfigurationError: When ``.init()`` has not been called yet.
    """
    if not cls._inited:
        raise ConfigurationError('You have to call .init() first before deleting schemas')
    for conn in cls._connections.values():
        await conn.close()
        await conn.db_delete()
    cls._connections = {}
    await cls._reset_apps()
def __init__(
    self,
    source_field: Optional[str] = None,
    generated: bool = False,
    pk: bool = False,
    null: bool = False,
    default: Any = None,
    unique: bool = False,
    index: bool = False,
    description: Optional[str] = None,
    model: "Optional[Model]" = None,
    validators: Optional[List[Union[Validator, Callable]]] = None,
    **kwargs: Any,
) -> None:
    """Base field constructor: validates flags and stores field metadata."""
    # TODO: Rename pk to primary_key, alias pk, deprecate
    # TODO: Rename index to db_index, alias index, deprecate
    if not self.indexable and (unique or index):
        raise ConfigurationError(f"{self.__class__.__name__} can't be indexed")
    if pk and null:
        raise ConfigurationError(
            f"{self.__class__.__name__} can't be both null=True and pk=True"
        )
    if pk:
        # A primary key is implicitly unique and indexed.
        index = unique = True
    self.source_field = source_field
    self.generated = generated
    self.pk = pk
    self.default = default
    self.null = null
    self.unique = unique
    self.index = index
    self.model_field_name = ""
    self.description = description
    self.docstring: Optional[str] = None
    self.validators: List[Union[Validator, Callable]] = validators or []
    # TODO: consider making this not be set from constructor
    self.model: Type["Model"] = model  # type: ignore
    self.reference: "Optional[Field]" = None
def __init__(self, enum_type: Type[IntEnum], description: Optional[str] = None, **kwargs: Any) -> None:
    """Field for ``IntEnum`` subclasses; member values must lie in 0..32767."""
    # Validate that every member is an integer within the allowed range.
    for member in enum_type:
        try:
            member_value = int(member.value)
        except ValueError:
            raise ConfigurationError(
                "IntEnumField only supports integer enums!")
        if not 0 <= member_value < 32768:
            raise ConfigurationError(
                "The valid range of IntEnumField's values is 0..32767!")
    if description is None:
        # Automatic description: one "NAME: value" line per member, capped
        # at 2048 characters.
        description = "\n".join(
            f"{e.name}: {int(e.value)}" for e in enum_type)[:2048]
    super().__init__(description=description, **kwargs)
    self.enum_type = enum_type
async def drop_databases(self) -> None:
    """
    Try to drop every database from the config passed to ``.init()``.

    Normally should be used only for testing purposes.

    :raises ConfigurationError: When ``.init()`` has not been called yet.
    """
    if not self._inited:
        raise ConfigurationError(
            "You have to call .init() first before deleting schemas")
    for client in self._db_client_map.values():
        await client.close()
        await client.db_delete()
    self._reset()
def _set_kwargs(self, kwargs: dict) -> Set[str]: meta = self._meta # Assign values and do type conversions passed_fields = {*kwargs.keys()} | meta.fetch_fields for key, value in kwargs.items(): if key in meta.fk_fields or key in meta.o2o_fields: if value and not value._saved_in_db: raise OperationalError( f"You should first call .save() on {value} before referring to it" ) setattr(self, key, value) passed_fields.add(meta.fields_map[key].source_field) elif key in meta.backward_fk_fields: raise ConfigurationError( "You can't set backward relations through init, change related model instead" ) elif key in meta.backward_o2o_fields: raise ConfigurationError( "You can't set backward one to one relations through init," " change related model instead") elif key in meta.m2m_fields: raise ConfigurationError( "You can't set m2m relations through init, use m2m_manager instead" ) else: field_object = meta.fields_map.get(key) if field_object is None: continue if field_object.generated: self._custom_generated_pk = True if value is None and not field_object.null: raise ValueError( f"{key} is non nullable field, but null was passed") setattr(self, key, field_object.to_python_value(value)) return passed_fields
def _set_field_values(self, values_map: Dict[str, Any]) -> Set[str]:
    """
    Sets values for fields honoring type transformations and
    return list of fields that were set additionally
    """
    meta = self._meta
    passed_fields = set()
    for key, value in values_map.items():
        if key in meta.fk_fields:
            # Related object must already be persisted before being referenced.
            if value and not value._saved_in_db:
                raise OperationalError(
                    "You should first call .save() on {} before referring to it".format(value)
                )
            field_object = meta.fields_map[key]
            relation_field = field_object.source_field  # type: str  # type: ignore
            setattr(self, key, value)
            # Track the relation's underlying db column as set.
            passed_fields.add(relation_field)
        elif key in meta.fields_db_projection:
            # Key is a regular model field name: convert and assign.
            field_object = meta.fields_map[key]
            if value is None and not field_object.null:
                raise ValueError("{} is non nullable field, but null was passed".format(key))
            setattr(self, key, field_object.to_python_value(value))
        elif key in meta.db_fields:
            # Key is a raw db column name: translate back to the model field
            # via the reverse projection before conversion.
            field_object = meta.fields_map[meta.fields_db_projection_reverse[key]]
            if value is None and not field_object.null:
                raise ValueError("{} is non nullable field, but null was passed".format(key))
            setattr(self, key, field_object.to_python_value(value))
        elif key in meta.backward_fk_fields:
            raise ConfigurationError(
                "You can't set backward relations through init, change related model instead"
            )
        elif key in meta.m2m_fields:
            raise ConfigurationError(
                "You can't set m2m relations through init, use m2m_manager instead"
            )
    return passed_fields
def _get_config_from_config_file(cls, config_file):
    """
    Load a config dict from *config_file* (.yml/.yaml via PyYAML, or .json).

    :raises ConfigurationError: For any other file extension.
    """
    _, ext = os.path.splitext(config_file)
    if ext == '.json':
        with open(config_file, 'r') as f:
            return json.load(f)
    if ext in ('.yml', '.yaml'):
        import yaml
        with open(config_file, 'r') as f:
            return yaml.safe_load(f)
    raise ConfigurationError(
        'Unknown config extension {}, only .yml and .json are supported'
        .format(ext))
def _discover_models(cls, models_path, app_label) -> List[Type[Model]]:
    """
    Import *models_path* and collect its concrete ``Model`` subclasses,
    claiming each unclaimed model for *app_label*.

    :raises ConfigurationError: When the module cannot be imported.
    """
    try:
        module = importlib.import_module(models_path)
    except ImportError:
        raise ConfigurationError('Module "{}" not found'.format(models_path))
    discovered = []
    for attr_name in dir(module):
        candidate = getattr(module, attr_name)
        if not (isclass(candidate) and issubclass(candidate, Model)):
            continue
        if candidate._meta.abstract:
            continue
        # Skip models already claimed by a different app.
        if candidate._meta.app and candidate._meta.app != app_label:
            continue
        candidate._meta.app = app_label
        discovered.append(candidate)
    return discovered
async def generate_schemas(cls, safe: bool = True) -> None:
    """
    Generate schemas for all models provided to the ``.init()`` method.

    Will fail if the schemas already exist, so this is not recommended as
    part of the normal application workflow.

    :param safe: When set to true, creates the table only when it does not already exist.
    :raises ConfigurationError: When ``.init()`` has not been called.
    """
    if not cls._inited:
        raise ConfigurationError("You have to call .init() first before generating schemas")
    for conn in cls._connections.values():
        await generate_schema_for_client(conn, safe)
def register_listener(cls, signal: Signals, listener: Callable):
    """
    Register a listener on this model class for the given Signal.

    Registration is idempotent: an already-registered listener is not
    added twice.

    :param signal: one of tortoise.signals.Signal
    :param listener: callable listener
    :raises ConfigurationError: When listener is not callable
    """
    if not callable(listener):
        raise ConfigurationError("Signal listener must be callable!")
    listeners = cls._listeners.get(signal).setdefault(cls, [])  # type:ignore
    if listener not in listeners:
        listeners.append(listener)
def __init__(self, **kwargs) -> None:
    """
    Create a model instance from field keyword arguments, converting values
    and filling defaults for anything not passed.

    :raises OperationalError: When a relation target is not yet saved.
    :raises ConfigurationError: When a backward/m2m relation is passed.
    :raises ValueError: When ``None`` is passed for a non-nullable field.
    """
    # self._meta is a very common attribute lookup, lets cache it.
    meta = self._meta
    # Instance counts as persisted only if its generated pk was supplied.
    self._saved_in_db = meta.pk_attr in kwargs and meta.pk.generated
    # Assign values and do type conversions
    # fetch_fields are included up-front so relations never receive defaults.
    passed_fields = {*kwargs.keys()} | meta.fetch_fields
    for key, value in kwargs.items():
        if key in meta.fk_fields:
            # Related object must already be persisted before being referenced.
            if value and not value._saved_in_db:
                raise OperationalError(
                    f"You should first call .save() on {value} before referring to it"
                )
            setattr(self, key, value)
            # Track the relation's underlying db column as set.
            passed_fields.add(meta.fields_map[key].source_field)  # type: ignore
        elif key in meta.fields_db_projection:
            field_object = meta.fields_map[key]
            if value is None and not field_object.null:
                raise ValueError(f"{key} is non nullable field, but null was passed")
            setattr(self, key, field_object.to_python_value(value))
        elif key in meta.backward_fk_fields:
            raise ConfigurationError(
                "You can't set backward relations through init, change related model instead"
            )
        elif key in meta.m2m_fields:
            raise ConfigurationError(
                "You can't set m2m relations through init, use m2m_manager instead"
            )
    # Assign defaults for missing fields
    for key in meta.fields.difference(passed_fields):
        field_object = meta.fields_map[key]
        if callable(field_object.default):
            # Factory defaults (e.g. callables) are invoked per instance.
            setattr(self, key, field_object.default())
        else:
            setattr(self, key, field_object.default)
def _get_config_from_config_file(cls, config_file: str) -> dict:
    """
    Load a config dict from *config_file* (.yml/.yaml via PyYAML, or .json).

    :raises ConfigurationError: For any other file extension.
    """
    _, ext = os.path.splitext(config_file)
    if ext == ".json":
        with open(config_file, "r") as f:
            return json.load(f)
    if ext in (".yml", ".yaml"):
        import yaml  # pylint: disable=C0415
        with open(config_file, "r") as f:
            return yaml.safe_load(f)
    raise ConfigurationError(
        f"Unknown config extension {ext}, only .yml and .json are supported"
    )
def __init__(
    self,
    model_name: str,
    related_name: Union[Optional[str], Literal[False]] = None,
    on_delete: str = CASCADE,
    **kwargs: Any,
) -> None:
    """One-to-one relation to ``model_name`` (must be "app.Model" format)."""
    if model_name.count(".") != 1:
        raise ConfigurationError(
            'OneToOneField accepts model name in format "app.Model"')
    # Delegate to the FK constructor, forcing a unique constraint.
    super().__init__(model_name, related_name, on_delete, unique=True, **kwargs)
def dfs(table_name: str) -> None:
    # Depth-first post-order walk over FK references: creation SQL of
    # referenced tables is appended before that of their referrers.
    state = table_state_map.get(table_name, 0)  # 0 == NOT_VISITED
    if state == 1:  # 1 == VISITING
        raise ConfigurationError(
            "Can't create schema due to cyclic fk references")
    if state != 0:
        return  # 2 == VISITED: SQL already emitted
    table_state_map[table_name] = 1  # 1 == VISITING
    table = primary_tables[table_name]
    for ref_table in table.references:
        if ref_table != table_name:  # avoid self references
            dfs(ref_table)
    table_creation_sqls.append(table.creation_sql)
    table_state_map[table_name] = 2  # 2 == VISITED
def _resolve_field_for_model(self, model: "Type[Model]", table: Table, field: str, *default_values: Any) -> dict:
    """
    Recursively resolve a (possibly ``__``-nested) field path starting at
    *model* into the concrete column the database function operates on.

    :return: dict with "joins" (join triples needed to reach the column)
        and "field" (the built function field).
    :raises ConfigurationError: When a path segment is not a fetchable relation.
    """
    field_split = field.split("__")
    # Base case: a single segment — resolve on the current model.
    if not field_split[1:]:
        function_joins = []
        if field_split[0] in model._meta.fetch_fields:
            # Segment is a relation: operate on the related table's pk column.
            related_field = cast(RelationalField,
                                 model._meta.fields_map[field_split[0]])
            related_field_meta = related_field.model_class._meta
            join = (table, field_split[0], related_field)
            function_joins.append(join)
            field = related_field_meta.basetable[
                related_field_meta.db_pk_field]
        else:
            # Plain field: take the column from the current table.
            field = table[field_split[0]]
            if self.populate_field_object:
                self.field_object = model._meta.fields_map.get(
                    field_split[0], None)
                if self.field_object:  # pragma: nobranch
                    # Apply the field's "function_cast" hook for the current
                    # dialect, if one is defined.
                    func = self.field_object.get_for_dialect(
                        model._meta.db.capabilities.dialect, "function_cast")
                    if func:
                        field = func(self.field_object, field)
        function_field = self.database_func(field, *default_values)
        return {"joins": function_joins, "field": function_field}
    # Recursive case: first segment must be a relation to descend through.
    if field_split[0] not in model._meta.fetch_fields:
        raise ConfigurationError(f"{field} not resolvable")
    related_field = cast(RelationalField,
                         model._meta.fields_map[field_split[0]])
    join = (table, field_split[0], related_field)
    related_table = related_field.model_class._meta.basetable
    if isinstance(related_field, ForeignKeyFieldInstance):
        # Only FK's can be to same table, so we only auto-alias FK join tables
        related_table = related_table.as_(
            f"{table.get_table_name()}__{field_split[0]}")
    # Resolve the remainder of the path on the related model, then record
    # this hop's join on the way back out.
    function = self._resolve_field_for_model(related_field.model_class,
                                             related_table,
                                             "__".join(field_split[1:]),
                                             *default_values)
    function["joins"].append(join)
    return function
async def _drop_databases(cls) -> None:
    """
    Tries to drop all databases provided in config passed to ``.init()`` method.

    Normally should be used only for testing purposes.

    :raises ConfigurationError: When ``.init()`` has not been called.
    """
    if not cls._inited:
        raise ConfigurationError(
            "You have to call .init() first before deleting schemas")
    # Close any existing connections/pool (if any) and clear the storage.
    await connections.close_all(discard=False)
    for conn in connections.all():
        await conn.db_delete()
        connections.discard(conn.connection_name)
    await cls._reset_apps()
def _init_apps(cls, apps_config: dict) -> None:
    """
    Initialise each configured app: validate its default connection,
    register its models, then wire relations and base querysets.

    :raises ConfigurationError: When an app names an unknown connection.
    """
    for app_name, app_info in apps_config.items():
        conn_name = app_info.get("default_connection", "default")
        try:
            connections.get(conn_name)
        except KeyError:
            raise ConfigurationError(
                'Unknown connection "{}" for app "{}"'.format(
                    conn_name, app_name))
        cls.init_models(app_info["models"], app_name, _init_relations=False)
        for model in cls.apps[app_name].values():
            model._meta.default_connection = conn_name
    cls._init_relations()
    cls._build_initial_querysets()
def db_config(self) -> "DBConfigType":
    """
    Return the DB config.

    This is the same config passed to the
    :meth:`Tortoise.init<tortoise.Tortoise.init>` method while initialization.

    :raises ConfigurationError: If this property is accessed before calling
        the :meth:`Tortoise.init<tortoise.Tortoise.init>` method.
    """
    if self._db_config is not None:
        return self._db_config
    raise ConfigurationError(
        "DB configuration not initialised. Make sure to call "
        "Tortoise.init with a valid configuration before attempting "
        "to create connections."
    )
def __init__(self, model_name: str, through: Optional[str] = None,
             forward_key: Optional[str] = None, backward_key: str = "",
             related_name: str = "", **kwargs) -> None:
    """
    Many-to-many relation to ``model_name`` (must be "app.Model" format).

    :param through: optional name of the through (junction) table.
    :param forward_key: key pointing at the related model; defaults to
        ``<modelname>_id``.
    :param backward_key: key pointing back at this model.
    :param related_name: reverse accessor name on the related model.
    :raises ConfigurationError: When ``model_name`` is not "app.Model".
    """
    super().__init__(**kwargs)
    if len(model_name.split(".")) != 2:
        # Fix: the message previously said 'Foreign key accepts model name...',
        # copy-pasted from the FK field; name the correct field type.
        raise ConfigurationError(
            'ManyToManyField accepts model name in format "app.Model"')
    self.model_name = model_name
    self.related_name = related_name
    self.forward_key = forward_key or "{}_id".format(
        model_name.split(".")[1].lower())
    self.backward_key = backward_key
    self.through = through
    self._generated = False
def add_field(self, name: str, value: Field):
    """Register a field on the meta: projections, relation sets, filters."""
    if name in self.fields_map:
        raise ConfigurationError(f"Field {name} already present in meta")
    value.model = self._model
    self.fields_map[name] = value
    if value.has_db_field:
        self.fields_db_projection[name] = value.source_field or name
    # Classify relation fields so querysets know how to treat them.
    if isinstance(value, fields.ManyToManyField):
        self.m2m_fields.add(name)
    elif isinstance(value, fields.BackwardFKRelation):
        self.backward_fk_fields.add(name)
    self._filters.update(
        get_filters_for_field(
            field_name=name, field=value, source_field=value.source_field or name
        )
    )
    self.finalise_fields()