def __init__(
    self,
    name,
    column_name,
    schema=None,
    newname=None,
    type_=None,
    nullable=None,
    default=False,
    autoincrement=None,
    comment=False,
):
    super(AlterColumn, self).__init__(name, schema=schema)
    self.column_name = column_name
    self.nullable = nullable
    self.newname = newname
    self.default = default
    self.autoincrement = autoincrement
    self.comment = comment
    if type_ is None:
        raise util.CommandError(
            "All MySQL CHANGE/MODIFY COLUMN operations "
            "require the existing type."
        )
    self.type_ = sqltypes.to_instance(type_)
def alter_column(self, table_name, column_name,
                 nullable=None,
                 server_default=False,
                 name=None,
                 type_=None,
                 autoincrement=None,
                 **kw):
    existing = self.columns[column_name]
    existing_transfer = self.column_transfers[column_name]
    if name is not None and name != column_name:
        # note that we don't change '.key' - we keep referring
        # to the renamed column by its old key in _create(). neat!
        existing.name = name
        existing_transfer["name"] = name

    if type_ is not None:
        type_ = sqltypes.to_instance(type_)
        existing.type = type_
        existing_transfer["expr"] = cast(existing_transfer["expr"], type_)
    if nullable is not None:
        existing.nullable = nullable
    if server_default is not False:
        existing.server_default = server_default
    if autoincrement is not None:
        existing.autoincrement = bool(autoincrement)
def test_varchar_raise(self):
    for type_ in (
        String,
        VARCHAR,
        String(),
        VARCHAR(),
        Unicode,
        Unicode(),
    ):
        type_ = sqltypes.to_instance(type_)
        assert_raises_message(
            exc.CompileError,
            "VARCHAR requires a length on dialect firebird",
            type_.compile,
            dialect=firebird.dialect(),
        )

        t1 = Table("sometable", MetaData(), Column("somecolumn", type_))
        assert_raises_message(
            exc.CompileError,
            r"\(in table 'sometable', column 'somecolumn'\)\: "
            r"(?:N)?VARCHAR requires a length on dialect firebird",
            schema.CreateTable(t1).compile,
            dialect=firebird.dialect(),
        )
def to_sql(self, frame, name, if_exists='fail', index=True,
           index_label=None, schema=None, chunksize=None, dtype=None):
    if dtype and not is_dict_like(dtype):
        dtype = {col_name: dtype for col_name in frame}

    if dtype is not None:
        from sqlalchemy.types import to_instance, TypeEngine
        for col, my_type in dtype.items():
            if not isinstance(to_instance(my_type), TypeEngine):
                raise ValueError('The type of %s is not a SQLAlchemy '
                                 'type ' % col)

    table = GeoSQLTable(name, self, frame=frame, index=index,
                        if_exists=if_exists, index_label=index_label,
                        schema=schema, dtype=dtype)
    table.create()
    table.insert(chunksize)
    if not name.isdigit() and not name.islower():
        # check for potential case sensitivity issues (GH7815)
        # Only check when name is not a number and name is not lower case
        engine = self.connectable.engine
        with self.connectable.connect() as conn:
            table_names = engine.table_names(
                schema=schema or self.meta.schema,
                connection=conn,
            )
        if name not in table_names:
            msg = (
                "The provided table name '{0}' is not found exactly as "
                "such in the database after writing the table, possibly "
                "due to case sensitivity issues. Consider using lower "
                "case table names."
            ).format(name)
            warnings.warn(msg, UserWarning)
def to_sql_k(self, frame, name, if_exists='fail', index=True,
             index_label=None, schema=None, chunksize=None,
             dtype=None, **kwargs):
    if dtype is not None:
        from sqlalchemy.types import to_instance, TypeEngine
        for col, my_type in dtype.items():
            if not isinstance(to_instance(my_type), TypeEngine):
                raise ValueError('The type of %s is not a SQLAlchemy '
                                 'type ' % col)

    table = pd.io.sql.SQLTable(name, self, frame=frame, index=index,
                               if_exists=if_exists,
                               index_label=index_label,
                               schema=schema, dtype=dtype, **kwargs)
    table.create()
    table.insert(chunksize)
def test_varchar_raise(self):
    for type_ in (
        String,
        VARCHAR,
        String(),
        VARCHAR(),
        NVARCHAR(),
        Unicode,
        Unicode(),
    ):
        type_ = sqltypes.to_instance(type_)
        assert_raises_message(
            exc.CompileError,
            "VARCHAR requires a length on dialect mysql",
            type_.compile,
            dialect=mysql.dialect(),
        )

        t1 = Table('sometable', MetaData(), Column('somecolumn', type_))
        assert_raises_message(
            exc.CompileError,
            r"\(in table 'sometable', column 'somecolumn'\)\: "
            r"(?:N)?VARCHAR requires a length on dialect mysql",
            schema.CreateTable(t1).compile,
            dialect=mysql.dialect(),
        )
def __init__(self, type_=None, args=(), **kwargs):
    self.packagenames = []
    self.name = self.__class__.__name__
    self._bind = kwargs.get('bind', None)
    self.clause_expr = ClauseList(
        operator=operators.comma_op,
        group_contents=True, *args).self_group()
    self.type = sqltypes.to_instance(
        type_ or getattr(self, '__return_type__', None))
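# A minimal sketch of the to_instance() coercion every snippet here
# relies on (including the `type_ or __return_type__` fallback above):
# a type class is instantiated, an instance passes through unchanged,
# and None falls back to the NullType singleton. The assertions are
# illustrative, not part of any of the surrounding projects.
from sqlalchemy import types

assert isinstance(types.to_instance(types.String), types.String)  # class -> instance
inst = types.String(50)
assert types.to_instance(inst) is inst                            # instance passes through
assert isinstance(types.to_instance(None), types.NullType)        # None -> NullType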
def _visit_create_composite_type(create, compiler, **kw):
    type_ = create.element
    fields = ', '.join(
        '{name} {type}'.format(
            name=column.name,
            type=compiler.dialect.type_compiler.process(
                to_instance(column.type)
            )
        )
        for column in type_.columns
    )

    return 'CREATE TYPE {name} AS ({fields})'.format(
        name=compiler.preparer.format_type(type_),
        fields=fields
    )
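# For orientation: given a composite type named "point" with columns
# (x INTEGER, y INTEGER), the visitor above would emit roughly
#   CREATE TYPE point AS (x INTEGER, y INTEGER)
# which is PostgreSQL's composite-type DDL.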
def __init__(self, name, column_name, schema=None,
             existing_type=None,
             existing_nullable=None,
             existing_server_default=None):
    super(AlterColumn, self).__init__(name, schema=schema)
    self.column_name = column_name
    self.existing_type = sqltypes.to_instance(existing_type) \
        if existing_type is not None else None
    self.existing_nullable = existing_nullable
    self.existing_server_default = existing_server_default
def alter_column(
    self,
    table_name,
    column_name,
    nullable=None,
    server_default=False,
    name=None,
    type_=None,
    autoincrement=None,
    **kw
):
    existing = self.columns[column_name]
    existing_transfer = self.column_transfers[column_name]
    if name is not None and name != column_name:
        # note that we don't change '.key' - we keep referring
        # to the renamed column by its old key in _create(). neat!
        existing.name = name
        existing_transfer["name"] = name

    if type_ is not None:
        type_ = sqltypes.to_instance(type_)
        # old type is being discarded so turn off eventing
        # rules. Alternatively we can
        # erase the events set up by this type, but this is simpler.
        # we also ignore the drop_constraint that will come here from
        # Operations.implementation_for(alter_column)
        if isinstance(existing.type, SchemaEventTarget):
            existing.type._create_events = (
                existing.type.create_constraint
            ) = False

        if existing.type._type_affinity is not type_._type_affinity:
            existing_transfer["expr"] = cast(
                existing_transfer["expr"], type_
            )

        existing.type = type_

        # we *don't* however set events for the new type, because
        # alter_column is invoked from
        # Operations.implementation_for(alter_column) which already
        # will emit an add_constraint()

    if nullable is not None:
        existing.nullable = nullable
    if server_default is not False:
        if server_default is None:
            existing.server_default = None
        else:
            sql_schema.DefaultClause(server_default)._set_parent(existing)
    if autoincrement is not None:
        existing.autoincrement = bool(autoincrement)
def test_default_reflection(
    self, datatype, default, expected, metadata, connection
):
    t1 = Table("t1", metadata, Column("x", datatype, default))
    t1.create(connection)
    insp = inspect(connection)

    datatype_inst = types.to_instance(datatype)

    col = insp.get_columns("t1")[0]
    if hasattr(expected, "match"):
        assert expected.match(col["default"])
    elif isinstance(datatype_inst, (Integer, Numeric)):
        pattern = re.compile(r"\'?%s\'?" % expected)
        assert pattern.match(col["default"])
    else:
        eq_(col["default"], expected)
def __init__(
    self,
    name: str,
    column_name: str,
    schema: Optional[str] = None,
    existing_type: Optional["TypeEngine"] = None,
    existing_nullable: Optional[bool] = None,
    existing_server_default: Optional[_ServerDefault] = None,
    existing_comment: Optional[str] = None,
) -> None:
    super(AlterColumn, self).__init__(name, schema=schema)
    self.column_name = column_name
    self.existing_type = (
        sqltypes.to_instance(existing_type)
        if existing_type is not None
        else None
    )
    self.existing_nullable = existing_nullable
    self.existing_server_default = existing_server_default
    self.existing_comment = existing_comment
def test_varchar_raise(self, type_):
    type_ = sqltypes.to_instance(type_)
    assert_raises_message(
        exc.CompileError,
        "VARCHAR requires a length on dialect mysql",
        type_.compile,
        dialect=mysql.dialect(),
    )

    t1 = Table("sometable", MetaData(), Column("somecolumn", type_))
    assert_raises_message(
        exc.CompileError,
        r"\(in table 'sometable', column 'somecolumn'\)\: "
        r"(?:N)?VARCHAR requires a length on dialect mysql",
        schema.CreateTable(t1).compile,
        dialect=mysql.dialect(),
    )
def to_sql_set_primary_key_and_not_null(self, frame, name, con, keys,
                                        sql_table, schema=None,
                                        if_exists='fail', index=True,
                                        index_label=None, chunksize=None,
                                        dtype=None):
    # ref: https://github.com/pandas-dev/pandas/blob/master/pandas/io/sql.py#L437
    if if_exists not in ('fail', 'replace', 'append'):
        raise ValueError(
            "'{0}' is not valid for if_exists".format(if_exists))

    # ref: https://github.com/pandas-dev/pandas/blob/master/pandas/io/sql.py#L508
    pandas_sql = SQLDatabase(con, schema=schema)

    if isinstance(frame, pd.Series):
        frame = frame.to_frame()
    elif not isinstance(frame, pd.DataFrame):
        raise NotImplementedError(
            "'frame' argument should be either a Series or a DataFrame")

    if dtype is not None:
        from sqlalchemy.types import to_instance, TypeEngine
        for col, my_type in dtype.items():
            if not isinstance(to_instance(my_type), TypeEngine):
                raise ValueError(
                    'The type of {} is not a SQLAlchemy type '.format(col))

    table = SQLTable(name, pandas_sql, frame=frame, index=index,
                     if_exists=if_exists, index_label=index_label,
                     schema=schema, keys=keys, dtype=dtype)
    table.table = sql_table
    table.create()
    table.insert(chunksize)
def as_mutable(cls, sqltype):
    """Associate a SQL type with this mutable Python type.

    This establishes listeners that will detect ORM mappings against
    the given type, adding mutation event trackers to those mappings.

    The type is returned, unconditionally as an instance, so that
    :meth:`.as_mutable` can be used inline::

        Table('mytable', metadata,
              Column('id', Integer, primary_key=True),
              Column('data', MyMutableType.as_mutable(PickleType))
              )

    Note that the returned type is always an instance, even if a class
    is given, and that only columns which are declared specifically
    with that type instance receive additional instrumentation.

    To associate a particular mutable type with all occurrences of a
    particular type, use the :meth:`.Mutable.associate_with`
    classmethod of the particular :class:`.Mutable` subclass to
    establish a global association.

    .. warning::

       The listeners established by this method are *global* to all
       mappers, and are *not* garbage collected. Only use
       :meth:`.as_mutable` for types that are permanent to an
       application, not with ad-hoc types else this will cause
       unbounded growth in memory usage.

    """
    sqltype = types.to_instance(sqltype)

    def listen_for_type(mapper, class_):
        for prop in mapper.iterate_properties:
            if hasattr(prop, 'columns'):
                if prop.columns[0].type is sqltype:
                    cls.associate_with_attribute(
                        getattr(class_, prop.key))

    event.listen(mapper, 'mapper_configured', listen_for_type)

    return sqltype
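# Hedged usage sketch for as_mutable(), following the docstring above.
# MutableDict is SQLAlchemy's stock Mutable subclass; the table and
# column names are illustrative only.
from sqlalchemy import Column, Integer, MetaData, PickleType, Table
from sqlalchemy.ext.mutable import MutableDict

metadata = MetaData()
mytable = Table(
    'mytable', metadata,
    Column('id', Integer, primary_key=True),
    # Only columns declared with this exact returned type instance
    # receive mutation-tracking instrumentation.
    Column('data', MutableDict.as_mutable(PickleType)),
)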
def as_mutable(cls, orig_sqltype):
    """Mark the value as a nested mutable value.

    What happens here:

    * We coerce the return value - the type value set on
      sqlalchemy.Column() - to the underlying SQL type

    * We mark this type value with a marker attribute

    * Then we set a global SQLAlchemy mapper event handler

    * When the mapper is done setting up our model classes, it will
      call the event handler for all models

    * We check if any of the model's columns have our marked type
      value as the value

    * If so, we call ``associate_with_attribute`` for this model and
      column, which sets up the ``MutableBase._listen_on_attribute``
      event handlers. These event handlers take care of taking the
      raw dict coming out of the database and wrapping it in a
      NestedMutableDict.

    :param orig_sqltype: Usually a websauna.system.model.column.JSONB
        instance
    :return: Marked and coerced type value
    """
    # Create an instance of this type and add a marker attribute,
    # so we can find it later.
    # We cannot directly compare the resulting type values, as it
    # looks like the type value might be mangled by dialect-specific
    # implementations or lost somewhere. Never figured this out 100%.
    sqltype = types.to_instance(orig_sqltype)
    sqltype._column_value_id = id(sqltype)

    def listen_for_type(mapper, class_):
        for prop in mapper.column_attrs:
            # The original implementation compares SQLAlchemy types
            # directly. Here we need to be a little more involved,
            # because we define a type alias for the generic JSONB
            # implementation.
            if getattr(prop.columns[0].type, "_column_value_id",
                       None) == sqltype._column_value_id:
                cls.associate_with_attribute(getattr(class_, prop.key))

    event.listen(mapper, 'mapper_configured', listen_for_type)

    return sqltype
def _compile_ndb_string(element, compiler, **kw):
    """Process ndb specific overrides for String.

    Function will intercept mysql_ndb_length and mysql_ndb_type
    arguments to adjust columns automatically.

    mysql_ndb_length argument will adjust the String length
    to the requested value.

    mysql_ndb_type will change the column type to the requested
    data type.
    """
    if not ndb_status(compiler):
        return compiler.visit_string(element, **kw)

    if element.mysql_ndb_length:
        effective_type = compat_utils.adapt_type_object(
            element, _String, length=element.mysql_ndb_length)
        return compiler.visit_string(effective_type, **kw)
    elif element.mysql_ndb_type:
        effective_type = to_instance(element.mysql_ndb_type)
        return compiler.process(effective_type, **kw)
    else:
        return compiler.visit_string(element, **kw)
def _compile_ndb_string(element, compiler, **kw):
    """Process ndb specific overrides for String.

    Function will intercept mysql_ndb_length and mysql_ndb_type
    arguments to adjust columns automatically.

    mysql_ndb_length argument will adjust the String length
    to the requested value.

    mysql_ndb_type will change the column type to the requested
    data type.
    """
    if not ndb_status(compiler):
        return compiler.visit_string(element, **kw)

    if element.mysql_ndb_length:
        effective_type = element.adapt(
            _String, length=element.mysql_ndb_length)
        return compiler.visit_string(effective_type, **kw)
    elif element.mysql_ndb_type:
        effective_type = to_instance(element.mysql_ndb_type)
        return compiler.process(effective_type, **kw)
    else:
        return compiler.visit_string(element, **kw)
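# Hedged sketch of the kind of column the two hooks above react to.
# It assumes a String wrapper exposing mysql_ndb_length /
# mysql_ndb_type attributes (oslo.db ships such a wrapper; the exact
# import path is an assumption here). On an NDB backend the first
# column would compile with the shortened length, the second as TEXT:
#
#   from oslo_db.sqlalchemy.types import String
#   Column('description', String(4096, mysql_ndb_length=255))
#   Column('payload', String(4096, mysql_ndb_type=TEXT))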
def get_types():
    sqlalchemy_data_types = {}
    for attribute in [
        a for a in types.__all__
        if callable(getattr(types, a)) and a not in [
            'TypeDecorator', 'TypeEngine', 'UserDefinedType',
            'JSON', 'BLOB', 'LargeBinary', 'Binary', 'PickleType'
        ]
    ]:
        try:
            attribute = types.to_instance(getattr(types, attribute))
            if isinstance(attribute, types.TypeEngine):
                fields = vars(attribute)
                sqlalchemy_data_types[str(attribute)] = fields
        except TypeError:
            continue
    return sqlalchemy_data_types
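# Hypothetical usage of get_types() above: keys are the string forms
# of each instantiable SQLAlchemy type, values are the attribute dicts
# of a default-constructed instance. Exact keys and fields vary with
# the installed SQLAlchemy version.
type_map = get_types()
print(type_map.get('VARCHAR'))  # e.g. {'length': None, 'collation': None, ...}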
def __init__(
    self, name: str, column_name: str, type_: "TypeEngine", **kw
) -> None:
    super(ColumnType, self).__init__(name, column_name, **kw)
    self.type_ = sqltypes.to_instance(type_)
def __init__(self, name, column_name, type_, **kw):
    super(ColumnType, self).__init__(name, column_name, **kw)
    self.type_ = sqltypes.to_instance(type_)
def __init__(self, name, column_name, type_, **kw):
    using = kw.pop("using", None)
    super(PostgresqlColumnType, self).__init__(name, column_name, **kw)
    self.type_ = sqltypes.to_instance(type_)
    self.using = using
def __init__(self, name, column_name, type_, **kw):
    using = kw.pop('using', None)
    super(VerticaColumnType, self).__init__(name, column_name, **kw)
    self.type_ = sqltypes.to_instance(type_)
    self.using = using
def __init__(self, name: str, column_name: str, type_: "TypeEngine", **kw) -> None: using = kw.pop("using", None) super(PostgresqlColumnType, self).__init__(name, column_name, **kw) self.type_ = sqltypes.to_instance(type_) self.using = using
def alter_column(
    self,
    table_name: str,
    column_name: str,
    nullable: Optional[bool] = None,
    server_default: Optional[Union["Function", str, bool]] = False,
    name: Optional[str] = None,
    type_: Optional["TypeEngine"] = None,
    autoincrement: None = None,
    comment: Union[str, "Literal[False]"] = False,
    **kw
) -> None:
    existing = self.columns[column_name]
    existing_transfer: Dict[str, Any] = self.column_transfers[column_name]
    if name is not None and name != column_name:
        # note that we don't change '.key' - we keep referring
        # to the renamed column by its old key in _create(). neat!
        existing.name = name
        existing_transfer["name"] = name

    # pop named constraints for Boolean/Enum for rename
    if (
        "existing_type" in kw
        and isinstance(kw["existing_type"], SchemaEventTarget)
        and kw["existing_type"].name  # type:ignore[attr-defined]
    ):
        self.named_constraints.pop(
            kw["existing_type"].name, None  # type:ignore[attr-defined]
        )

    if type_ is not None:
        type_ = sqltypes.to_instance(type_)

        # old type is being discarded so turn off eventing
        # rules. Alternatively we can
        # erase the events set up by this type, but this is simpler.
        # we also ignore the drop_constraint that will come here from
        # Operations.implementation_for(alter_column)
        if isinstance(existing.type, SchemaEventTarget):
            existing.type._create_events = (  # type:ignore[attr-defined]
                existing.type.create_constraint  # type:ignore[attr-defined] # noqa
            ) = False

        self.impl.cast_for_batch_migrate(
            existing, existing_transfer, type_
        )

        existing.type = type_

        # we *don't* however set events for the new type, because
        # alter_column is invoked from
        # Operations.implementation_for(alter_column) which already
        # will emit an add_constraint()

    if nullable is not None:
        existing.nullable = nullable
    if server_default is not False:
        if server_default is None:
            existing.server_default = None
        else:
            sql_schema.DefaultClause(
                server_default
            )._set_parent(  # type:ignore[attr-defined]
                existing
            )
    if autoincrement is not None:
        existing.autoincrement = bool(autoincrement)
    if comment is not False:
        existing.comment = comment
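# Hedged usage sketch: a batch alter_column like the one above is
# normally reached through Alembic's batch_alter_table context in a
# migration script. Table and column names here are illustrative.
import sqlalchemy as sa
from alembic import op


def upgrade():
    with op.batch_alter_table("accounts") as batch_op:
        batch_op.alter_column(
            "name",
            type_=sa.String(120),
            existing_type=sa.String(50),
        )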
def __init__(self, base, field, type_):
    self.name = field
    self.type = to_instance(type_)
    super(CompositeElement, self).__init__(base)
def to_sql(self, catalog, name, if_exists='fail', schema=None,
           chunksize=None, dtype=None):
    """
    Write records stored in an afw table to a SQL database.

    Parameters
    ----------
    catalog : afw table
    name : string
        Name of SQL table.
    if_exists : {'fail', 'replace', 'append'}, default 'fail'
        - fail: If table exists, do nothing.
        - replace: If table exists, drop it, recreate it, and insert data.
        - append: If table exists, insert data. Create if does not exist.
    schema : string, default None
        Name of SQL schema in database to write to (if database flavor
        supports this). If specified, this overwrites the default
        schema of the SQLDatabase object.
    chunksize : int, default None
        If not None, then rows will be written in batches of this size
        at a time. If None, all rows will be written at once.
    dtype : single type or dict of column name to SQL type, default None
        Optional specifying the datatype for columns. The SQL type
        should be a SQLAlchemy type. If all columns are of the same
        type, one single value can be used.
    """
    if dtype and not _is_dict_like(dtype):
        dtype = {col_name: dtype for col_name in catalog}

    if dtype is not None:
        for col, my_type in dtype.items():
            if not isinstance(to_instance(my_type), TypeEngine):
                raise ValueError('The type of %s is not a SQLAlchemy '
                                 'type ' % col)

    table = SQLTable(name, self, catalog=catalog, if_exists=if_exists,
                     schema=schema, dtype=dtype)
    table.create()
    table.insert(chunksize)
    if not name.isdigit() and not name.islower():
        # check for potential case sensitivity issues
        # Only check when name is not a number and name is not lower case
        engine = self.connectable.engine
        with self.connectable.connect() as conn:
            table_names = engine.table_names(
                schema=schema or self.meta.schema,
                connection=conn,
            )
        if name not in table_names:
            msg = (
                "The provided table name '{0}' is not found exactly as "
                "such in the database after writing the table, possibly "
                "due to case sensitivity issues. Consider using lower "
                "case table names.").format(name)
            warnings.warn(msg, UserWarning)
def to_sql(self, frame, name, if_exists='fail', index=True,
           index_label=None, schema=None, chunksize=None, dtype=None,
           pkcs=None):
    """
    Write records stored in a DataFrame to a SQL database.

    Parameters
    ----------
    frame : DataFrame
    name : string
        Name of SQL table.
    if_exists : {'fail', 'replace', 'append'}, default 'fail'
        - fail: If table exists, do nothing.
        - replace: If table exists, drop it, recreate it, and insert data.
        - append: If table exists, insert data. Create if does not exist.
    index : boolean, default True
        Write DataFrame index as a column.
    index_label : string or sequence, default None
        Column label for index column(s). If None is given (default)
        and `index` is True, then the index names are used.
        A sequence should be given if the DataFrame uses MultiIndex.
    schema : string, default None
        Name of SQL schema in database to write to (if database flavor
        supports this). If specified, this overwrites the default
        schema of the SQLDatabase object.
    chunksize : int, default None
        If not None, then rows will be written in batches of this size
        at a time. If None, all rows will be written at once.
    dtype : single type or dict of column name to SQL type, default None
        Optional specifying the datatype for columns. The SQL type
        should be a SQLAlchemy type. If all columns are of the same
        type, one single value can be used.
    """
    if dtype and not is_dict_like(dtype):
        dtype = {col_name: dtype for col_name in frame}

    if dtype is not None:
        from sqlalchemy.types import to_instance, TypeEngine
        for col, my_type in dtype.items():
            if not isinstance(to_instance(my_type), TypeEngine):
                raise ValueError('The type of %s is not a SQLAlchemy '
                                 'type ' % col)

    table = SQLTable_extend(name, self, frame=frame, index=index,
                            if_exists=if_exists, index_label=index_label,
                            schema=schema, dtype=dtype, pkcs=pkcs)
    table.create()
    table.insert(chunksize)
    if not name.isdigit() and not name.islower():
        # check for potential case sensitivity issues (GH7815)
        # Only check when name is not a number and name is not lower case
        engine = self.connectable.engine
        with self.connectable.connect() as conn:
            table_names = engine.table_names(
                schema=schema or self.meta.schema,
                connection=conn,
            )
        if name not in table_names:
            msg = (
                "The provided table name '{0}' is not found exactly as "
                "such in the database after writing the table, possibly "
                "due to case sensitivity issues. Consider using lower "
                "case table names.").format(name)
            warnings.warn(msg, UserWarning)
def __init__(self, base, field, type_):
    ColumnElement.__init__(self)
    self.base = base
    self.field = field
    self.type = to_instance(type_)
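# A hedged sketch of the @compiles hook that typically pairs with a
# CompositeElement constructor like the one above: it renders
# PostgreSQL's "(row).field" attribute access on a composite value.
# The exact rendering in any given library may differ; this is an
# assumption, not the surrounding project's code.
from sqlalchemy.ext.compiler import compiles


@compiles(CompositeElement)
def _compile_composite_element(element, compiler, **kw):
    # e.g. a column "loc" with field "x" compiles to "(tbl.loc).x"
    return '({0}).{1}'.format(compiler.process(element.base, **kw),
                              element.field)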