class ResidenceOccupancy(Base):
    __tablename__ = "residence_occupancy"
    __table_args__ = (
        sql_schema.ForeignKeyConstraint(
            ["user_id", "u_community_id"], ["user.id", "user.community_id"]
        ),
        sql_schema.ForeignKeyConstraint(
            ["residence_id", "r_community_id"],
            ["residence.id", "residence.community_id"],
        ),
        sql_schema.CheckConstraint("u_community_id = r_community_id"),
    )

    residence_id = Column(sql_types.Integer, nullable=False, primary_key=True)
    user_id = Column(sql_types.Integer, nullable=False, primary_key=True)
    r_community_id = Column(sql_types.Integer, nullable=False)
    u_community_id = Column(sql_types.Integer, nullable=False)
    relationship_name = Column(
        sql_types.String(40), nullable=False, default="Occupant"
    )
    is_owner = Column(sql_types.Boolean, nullable=False, default=False)

    residence = orm.relationship("Residence", lazy=True, viewonly=True)
    user = orm.relationship("User", lazy=True, viewonly=True)

    def __repr__(self):
        return "ResidenceOccupancy()"
class ProfileServiceSource(Base):
    __tablename__ = 'dird_profile_service_source'
    __table_args__ = (
        schema.ForeignKeyConstraint(
            ['profile_service_uuid', 'profile_tenant_uuid'],
            [
                'dird_profile_service.uuid',
                'dird_profile_service.profile_tenant_uuid',
            ],
            ondelete='CASCADE',
            name='dird_profile_service_source_profile_service_uuid_tenant_fkey',
        ),
        schema.ForeignKeyConstraint(
            ['source_uuid', 'source_tenant_uuid'],
            ['dird_source.uuid', 'dird_source.tenant_uuid'],
            ondelete='CASCADE',
            name='dird_profile_service_source_source_uuid_tenant_fkey',
        ),
        schema.CheckConstraint('profile_tenant_uuid = source_tenant_uuid'),
    )

    profile_service_uuid = Column(String(UUID_LENGTH), primary_key=True)
    profile_tenant_uuid = Column(String(UUID_LENGTH))
    source_uuid = Column(String(UUID_LENGTH), primary_key=True)
    source_tenant_uuid = Column(String(UUID_LENGTH))

    sources = relationship('Source')
def moretableinfo(self, connection, table):
    """Return the table type, appending any foreign key constraints
    found to the given table.

    execute(SHOW CREATE TABLE child) =>
    CREATE TABLE `child` (
      `id` int(11) default NULL,
      `parent_id` int(11) default NULL,
      KEY `par_ind` (`parent_id`),
      CONSTRAINT `child_ibfk_1` FOREIGN KEY (`parent_id`)
        REFERENCES `parent` (`id`) ON DELETE CASCADE
    ) TYPE=InnoDB
    """
    c = connection.execute("SHOW CREATE TABLE " + table.name, {})
    desc = c.fetchone()[1].strip()

    tabletype = ''
    lastparen = re.search(r'\)[^\)]*\Z', desc)
    if lastparen:
        match = re.search(r'\b(?:TYPE|ENGINE)=(?P<ttype>.+)\b',
                          desc[lastparen.start():], re.I)
        if match:
            tabletype = match.group('ttype')

    fkpat = (r'CONSTRAINT `(?P<name>.+?)` FOREIGN KEY \((?P<columns>.+?)\) '
             r'REFERENCES `(?P<reftable>.+?)` \((?P<refcols>.+?)\)')
    for match in re.finditer(fkpat, desc):
        columns = re.findall(r'`(.+?)`', match.group('columns'))
        refcols = [
            match.group('reftable') + "." + x
            for x in re.findall(r'`(.+?)`', match.group('refcols'))
        ]
        constraint = schema.ForeignKeyConstraint(columns, refcols,
                                                 name=match.group('name'))
        table.append_item(constraint)

    return tabletype
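# A minimal standalone sketch (not part of the dialect code above) showing
# what the fkpat regex extracts from a typical SHOW CREATE TABLE body.
# The sample DDL string is assumed, not fetched from a live server.
import re

_SAMPLE_DDL = (
    "CREATE TABLE `child` (\n"
    "  `id` int(11) default NULL,\n"
    "  `parent_id` int(11) default NULL,\n"
    "  KEY `par_ind` (`parent_id`),\n"
    "  CONSTRAINT `child_ibfk_1` FOREIGN KEY (`parent_id`) "
    "REFERENCES `parent` (`id`) ON DELETE CASCADE\n"
    ") TYPE=InnoDB"
)

_FKPAT = (r'CONSTRAINT `(?P<name>.+?)` FOREIGN KEY \((?P<columns>.+?)\) '
          r'REFERENCES `(?P<reftable>.+?)` \((?P<refcols>.+?)\)')

for m in re.finditer(_FKPAT, _SAMPLE_DDL):
    cols = re.findall(r'`(.+?)`', m.group('columns'))
    refs = ['%s.%s' % (m.group('reftable'), c)
            for c in re.findall(r'`(.+?)`', m.group('refcols'))]
    # prints: child_ibfk_1 ['parent_id'] ['parent.id']
    print(m.group('name'), cols, refs)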
def clear_db(db_path):
    db_engine = create_engine(db_path, poolclass=NullPool)
    with db_engine.begin() as conn:
        meta = MetaData()
        meta.reflect(bind=db_engine)
        inspector = reflection.Inspector.from_engine(db_engine)

        tbs = []
        all_fks = []
        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk['name']:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint(tuple(), tuple(),
                                                name=fk['name']))
            t = schema.Table(table_name, meta, *fks, extend_existing=True)
            tbs.append(t)
            all_fks.extend(fks)

        for fkc in all_fks:
            conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))

        # such a construction is available only for postgresql
        if db_engine.name == "postgresql":
            for en in _get_enums(conn):
                conn.execute("DROP TYPE {0}".format(en))
def _foreign_key_constraint(self, name, source, referent,
                            local_cols, remote_cols,
                            onupdate=None, ondelete=None,
                            deferrable=None, source_schema=None,
                            referent_schema=None):
    m = sa_schema.MetaData()
    if source == referent:
        t1_cols = local_cols + remote_cols
    else:
        t1_cols = local_cols
        sa_schema.Table(
            referent, m,
            *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
            schema=referent_schema)

    t1 = sa_schema.Table(
        source, m,
        *[sa_schema.Column(n, NULLTYPE) for n in t1_cols],
        schema=source_schema)

    tname = "%s.%s" % (referent_schema, referent) if referent_schema \
        else referent

    f = sa_schema.ForeignKeyConstraint(
        local_cols,
        ["%s.%s" % (tname, n) for n in remote_cols],
        name=name,
        onupdate=onupdate,
        ondelete=ondelete,
        deferrable=deferrable)
    t1.append_constraint(f)
    return f
def generic_constraint(self, name, table_name, type_, schema=None, **kw):
    t = self.table(table_name, schema=schema)
    types = {
        "foreignkey": lambda name: sa_schema.ForeignKeyConstraint(
            [], [], name=name),
        "primary": sa_schema.PrimaryKeyConstraint,
        "unique": sa_schema.UniqueConstraint,
        "check": lambda name: sa_schema.CheckConstraint("", name=name),
        None: sa_schema.Constraint,
    }
    try:
        const = types[type_]
    except KeyError as ke:
        raise_(
            TypeError(
                "'type' can be one of %s"
                % ", ".join(sorted(repr(x) for x in types))
            ),
            from_=ke,
        )
    else:
        const = const(name=name)

    t.append_constraint(const)
    return const
def generic_constraint(
    self,
    name: Optional[str],
    table_name: str,
    type_: Optional[str],
    schema: Optional[str] = None,
    **kw
) -> Any:
    t = self.table(table_name, schema=schema)
    types: Dict[Optional[str], Any] = {
        "foreignkey": lambda name: sa_schema.ForeignKeyConstraint(
            [], [], name=name),
        "primary": sa_schema.PrimaryKeyConstraint,
        "unique": sa_schema.UniqueConstraint,
        "check": lambda name: sa_schema.CheckConstraint("", name=name),
        None: sa_schema.Constraint,
    }
    try:
        const = types[type_]
    except KeyError as ke:
        raise TypeError(
            "'type' can be one of %s"
            % ", ".join(sorted(repr(x) for x in types))
        ) from ke
    else:
        const = const(name=name)

    t.append_constraint(const)
    return const
def tearDown(self):
    sqlalchemy.orm.session.Session.close_all()
    meta = sqlalchemy.MetaData()
    meta.bind = self.engine
    meta.reflect(self.engine)

    with self.engine.begin() as conn:
        inspector = reflection.Inspector.from_engine(self.engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk['name']:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint((), (), name=fk['name']))
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        for fkc in all_fks:
            conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))

    sql.cleanup()
    super(SqlMigrateBase, self).tearDown()
class ProfileService(Base):
    __tablename__ = 'dird_profile_service'
    __table_args__ = (
        schema.UniqueConstraint('uuid', 'profile_tenant_uuid'),
        schema.ForeignKeyConstraint(
            ['profile_uuid', 'profile_tenant_uuid'],
            ['dird_profile.uuid', 'dird_profile.tenant_uuid'],
            ondelete='CASCADE',
            name='dird_profile_service_profile_uuid_tenant_fkey',
        ),
    )

    uuid = Column(
        String(UUID_LENGTH),
        server_default=text('uuid_generate_v4()'),
        primary_key=True,
    )
    profile_uuid = Column(String(UUID_LENGTH))
    profile_tenant_uuid = Column(String(UUID_LENGTH))
    service_uuid = Column(
        String(UUID_LENGTH),
        ForeignKey('dird_service.uuid', ondelete='CASCADE'),
    )
    config = Column(JSON)

    service = relationship('Service')
    profile_service_sources = relationship('ProfileServiceSource')
    sources = association_proxy('profile_service_sources', 'sources')
def drop_constraint(self, name, tablename, type=None):
    """Drop a constraint of the given name, typically via DROP CONSTRAINT.

    :param name: name of the constraint.
    :param tablename: tablename.
    :param type: optional, required on MySQL.  can be
     'foreignkey', 'primary', 'unique', or 'check'.

    .. versionadded:: 0.3.6 'primary' qualifier to enable
       dropping of MySQL primary key constraints.

    """
    t = self._table(tablename)
    types = {
        'foreignkey': lambda name: schema.ForeignKeyConstraint(
            [], [], name=name),
        'primary': schema.PrimaryKeyConstraint,
        'unique': schema.UniqueConstraint,
        'check': lambda name: schema.CheckConstraint("", name=name),
        None: schema.Constraint,
    }
    try:
        const = types[type]
    except KeyError:
        raise TypeError("'type' can be one of %s" %
                        ", ".join(sorted(repr(x) for x in types)))
    const = const(name=name)
    t.append_constraint(const)
    self.impl.drop_constraint(const)
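# Hypothetical migration step using the drop_constraint() operation above;
# 'fk_child_parent_id' and 'child' are illustrative names. The type
# qualifier matters on MySQL, which has no generic DROP CONSTRAINT
# statement, so the operation must render a dialect-specific DROP.
from alembic import op

def downgrade():
    op.drop_constraint('fk_child_parent_id', 'child', type='foreignkey')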
class PropertySheet(Base):
    '''A triple describing a resource
    '''
    __tablename__ = 'propsheets'
    __table_args__ = (
        schema.ForeignKeyConstraint(
            ['rid', 'name'],
            ['current_propsheets.rid', 'current_propsheets.name'],
            name='fk_property_sheets_rid_name',
            use_alter=True,
            deferrable=True,
            initially='DEFERRED',
        ),
    )

    # The sid column also serves as the order.
    sid = Column(types.Integer, autoincrement=True, primary_key=True)
    rid = Column(UUID,
                 ForeignKey('resources.rid',
                            deferrable=True,
                            initially='DEFERRED'),
                 nullable=False,
                 index=True)
    name = Column(types.String, nullable=False)
    properties = Column(JSON)
    tid = Column(UUID,
                 ForeignKey('transactions.tid',
                            deferrable=True,
                            initially='DEFERRED'),
                 nullable=False)

    resource = orm.relationship('Resource')
    transaction = orm.relationship('TransactionRecord')
class Profile(Base):
    __tablename__ = 'dird_profile'
    __table_args__ = (
        schema.UniqueConstraint('uuid', 'tenant_uuid'),
        schema.UniqueConstraint('name', 'tenant_uuid'),
        schema.ForeignKeyConstraint(
            ['display_uuid', 'display_tenant_uuid'],
            ['dird_display.uuid', 'dird_display.tenant_uuid'],
            ondelete='SET NULL',
            name='dird_profile_display_uuid_tenant_fkey',
        ),
        schema.CheckConstraint('tenant_uuid = display_tenant_uuid'),
    )

    uuid = Column(
        String(UUID_LENGTH),
        server_default=text('uuid_generate_v4()'),
        primary_key=True,
    )
    tenant_uuid = Column(
        String(UUID_LENGTH),
        ForeignKey('dird_tenant.uuid', ondelete='CASCADE'),
    )
    name = Column(Text(), nullable=False)
    display_tenant_uuid = Column(String(UUID_LENGTH))
    display_uuid = Column(String(UUID_LENGTH))

    display = relationship('Display')
    services = relationship('ProfileService')
def _cleanupDB(self):
    meta = sqlalchemy.MetaData()
    meta.bind = self.engine
    meta.reflect(self.engine)

    with self.engine.begin() as conn:
        inspector = reflection.Inspector.from_engine(self.engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk['name']:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint((), (), name=fk['name']))
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        for fkc in all_fks:
            if self.engine.name != 'sqlite':
                conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))
def test_manual_table_auto_joins(self):
    from sqlalchemy import Table, Column, ForeignKey, ForeignKeyConstraint

    a_b = schema.Table('a_b', self.metadata,
                       schema.Column('a_key1', Integer()),
                       schema.Column('a_key2', String(40)),
                       schema.Column('b_id', Integer(),
                                     schema.ForeignKey('b.id')),
                       schema.ForeignKeyConstraint(['a_key1', 'a_key2'],
                                                   ['a.key1', 'a.key2']))

    class A(self.Entity):
        using_options(shortnames=True)
        key1 = Field(Integer, primary_key=True, autoincrement=False)
        key2 = Field(String(40), primary_key=True)
        bs_ = ManyToMany('B', table=a_b)

    class B(self.Entity):
        using_options(shortnames=True)
        name = Field(String(60))
        as_ = ManyToMany('A', table=a_b)

    self.create_all()

    with self.session.begin():
        b1 = B(name='b1', as_=[A(key1=10, key2='a1')])

    self.session.expire_all()
    a = A.query.one()
    b = B.query.one()
    assert a in b.as_
    assert b in a.bs_
def foreign_key_constraint(
    self,
    name: Optional[str],
    source: str,
    referent: str,
    local_cols: List[str],
    remote_cols: List[str],
    onupdate: Optional[str] = None,
    ondelete: Optional[str] = None,
    deferrable: Optional[bool] = None,
    source_schema: Optional[str] = None,
    referent_schema: Optional[str] = None,
    initially: Optional[str] = None,
    match: Optional[str] = None,
    **dialect_kw
) -> "ForeignKeyConstraint":
    m = self.metadata()
    if source == referent and source_schema == referent_schema:
        t1_cols = local_cols + remote_cols
    else:
        t1_cols = local_cols
        sa_schema.Table(
            referent,
            m,
            *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
            schema=referent_schema
        )

    t1 = sa_schema.Table(
        source,
        m,
        *[sa_schema.Column(n, NULLTYPE) for n in t1_cols],
        schema=source_schema
    )

    tname = (
        "%s.%s" % (referent_schema, referent)
        if referent_schema
        else referent
    )

    dialect_kw["match"] = match

    f = sa_schema.ForeignKeyConstraint(
        local_cols,
        ["%s.%s" % (tname, n) for n in remote_cols],
        name=name,
        onupdate=onupdate,
        ondelete=ondelete,
        deferrable=deferrable,
        initially=initially,
        **dialect_kw
    )
    t1.append_constraint(f)

    return f
def foreign_key_constraint(
    self,
    name,
    source,
    referent,
    local_cols,
    remote_cols,
    onupdate=None,
    ondelete=None,
    deferrable=None,
    source_schema=None,
    referent_schema=None,
    initially=None,
    match=None,
    **dialect_kw,
):
    m = self.metadata()
    if source == referent and source_schema == referent_schema:
        t1_cols = local_cols + remote_cols
    else:
        t1_cols = local_cols
        sa_schema.Table(
            referent,
            m,
            *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
            schema=referent_schema,
        )

    t1 = sa_schema.Table(
        source,
        m,
        *[sa_schema.Column(n, NULLTYPE) for n in t1_cols],
        schema=source_schema,
    )

    tname = "%s.%s" % (referent_schema, referent) if referent_schema else referent

    dialect_kw["match"] = match

    f = sa_schema.ForeignKeyConstraint(
        local_cols,
        ["%s.%s" % (tname, n) for n in remote_cols],
        name=name,
        onupdate=onupdate,
        ondelete=ondelete,
        deferrable=deferrable,
        initially=initially,
        **dialect_kw,
    )
    t1.append_constraint(f)

    return f
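# Minimal standalone sketch of the same stub-table trick used above: build
# a ForeignKeyConstraint against throwaway NULLTYPE tables so it can be
# compiled without reflecting the real schema. Table and column names are
# illustrative, not taken from any source above.
from sqlalchemy import schema as sa_schema
from sqlalchemy.types import NULLTYPE

m = sa_schema.MetaData()
sa_schema.Table('parent', m, sa_schema.Column('id', NULLTYPE))
child = sa_schema.Table('child', m,
                        sa_schema.Column('parent_id', NULLTYPE))
fk = sa_schema.ForeignKeyConstraint(
    ['parent_id'], ['parent.id'],
    name='fk_child_parent', ondelete='CASCADE')
child.append_constraint(fk)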
class BillingCharge(Base):
    __tablename__ = "billing_charge"
    __table_args__ = (
        sql_schema.ForeignKeyConstraint(
            ["user_id", "u_community_id"], ["user.id", "user.community_id"]
        ),
        sql_schema.ForeignKeyConstraint(
            ["residence_id", "r_community_id"],
            ["residence.id", "residence.community_id"],
        ),
        sql_schema.CheckConstraint("u_community_id = r_community_id"),
        sql_schema.CheckConstraint("residence_id is null or user_id is null"),
    )

    id = Column(sql_types.Integer, primary_key=True)
    residence_id = Column(sql_types.Integer, nullable=True)
    user_id = Column(sql_types.Integer, nullable=True)
    r_community_id = Column(sql_types.Integer, nullable=True)
    u_community_id = Column(sql_types.Integer, nullable=True)
    name = Column(sql_types.String(200), nullable=False)
    base_amount = Column(sql_types.Integer, nullable=False)
    paid = Column(sql_types.Boolean, nullable=False)
    charge_date = Column(sql_types.Date, nullable=False)
    due_date = Column(sql_types.Date, nullable=False)

    def __repr__(self):
        return (f"BillingCharge("
                f"name={self.name},"
                f"amount={self.amount},"
                f"paid={self.paid},"
                f"charge_date={self.charge_date},"
                f"due_date={self.due_date},"
                f")")

    @hybrid_property
    def amount(self):
        return decimal.Decimal(".01") * self.base_amount

    @property
    def formatted_amount(self):
        return f"${self.amount}"
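# Quick illustration of the cents-to-Decimal conversion performed by the
# amount hybrid property above; the constructor arguments are illustrative,
# and the date columns are omitted because the object is never flushed.
import decimal

charge = BillingCharge(name="HOA dues", base_amount=12550, paid=False)
assert charge.amount == decimal.Decimal("125.50")   # .01 * 12550
assert charge.formatted_amount == "$125.50"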
def dropdb():
    from nailgun.db import migration

    conn = engine.connect()
    trans = conn.begin()
    meta = MetaData()
    meta.reflect(bind=engine)
    inspector = reflection.Inspector.from_engine(engine)

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(
                schema.ForeignKeyConstraint((), (), name=fk['name'])
            )
        t = schema.Table(
            table_name,
            meta,
            *fks,
            extend_existing=True
        )
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(schema.DropConstraint(fkc))

    for table in tbs:
        conn.execute(schema.DropTable(table))

    custom_types = conn.execute(
        "SELECT n.nspname as schema, t.typname as type "
        "FROM pg_type t LEFT JOIN pg_catalog.pg_namespace n "
        "ON n.oid = t.typnamespace "
        "WHERE (t.typrelid = 0 OR (SELECT c.relkind = 'c' "
        "FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid)) "
        "AND NOT EXISTS(SELECT 1 FROM pg_catalog.pg_type el "
        "WHERE el.oid = t.typelem AND el.typarray = t.oid) "
        "AND n.nspname NOT IN ('pg_catalog', 'information_schema')"
    )

    for tp in custom_types:
        conn.execute("DROP TYPE {0}".format(tp[1]))

    trans.commit()
    migration.drop_migration_meta(engine)
    conn.close()
    engine.dispose()
def drop_constraint(self, name, table_name, type_=None, schema=None):
    """Drop a constraint of the given name, typically via DROP CONSTRAINT.

    :param name: name of the constraint.
    :param table_name: table name.

     .. versionchanged:: 0.5.0
        The ``tablename`` parameter is now named ``table_name``.
        As this is a positional argument, the old name is no
        longer present.

    :param ``type_``: optional, required on MySQL.  can be
     'foreignkey', 'primary', 'unique', or 'check'.

     .. versionchanged:: 0.5.0
        The ``type`` parameter is now named ``type_``.  The old name
        ``type`` will remain for backwards compatibility.

     .. versionadded:: 0.3.6 'primary' qualifier to enable
        dropping of MySQL primary key constraints.

    :param schema: Optional schema name to operate within.

     .. versionadded:: 0.4.0

    """
    t = self._table(table_name, schema=schema)
    types = {
        'foreignkey': lambda name: sa_schema.ForeignKeyConstraint(
            [], [], name=name),
        'primary': sa_schema.PrimaryKeyConstraint,
        'unique': sa_schema.UniqueConstraint,
        'check': lambda name: sa_schema.CheckConstraint("", name=name),
        None: sa_schema.Constraint,
    }
    try:
        const = types[type_]
    except KeyError:
        raise TypeError("'type' can be one of %s" %
                        ", ".join(sorted(repr(x) for x in types)))
    const = const(name=name)
    t.append_constraint(const)
    self.impl.drop_constraint(const)
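# Hypothetical usage with the 0.5-era signature above; note the trailing
# underscore on type_ and the optional schema argument. The constraint,
# table, and schema names are illustrative.
from alembic import op

def downgrade():
    op.drop_constraint('fk_order_user_id', 'order',
                       type_='foreignkey', schema='billing')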
def _make_foreign_key(params, conn_table):
    tname = params['referred_table']
    if params['referred_schema']:
        tname = "%s.%s" % (params['referred_schema'], tname)

    const = sa_schema.ForeignKeyConstraint(
        [conn_table.c[cname] for cname in params['constrained_columns']],
        ["%s.%s" % (tname, n) for n in params['referred_columns']],
        onupdate=params.get('onupdate'),
        ondelete=params.get('ondelete'),
        deferrable=params.get('deferrable'),
        initially=params.get('initially'),
        name=params['name'])
    # needed by 0.7
    conn_table.append_constraint(const)
    return const
def moretableinfo(self, connection, table):
    """Return the table type, appending any foreign key constraints
    found to the given table.

    execute(SHOW CREATE TABLE child) =>
    CREATE TABLE `child` (
      `id` int(11) default NULL,
      `parent_id` int(11) default NULL,
      KEY `par_ind` (`parent_id`),
      CONSTRAINT `child_ibfk_1` FOREIGN KEY (`parent_id`)
        REFERENCES `parent` (`id`) ON DELETE CASCADE
    ) TYPE=InnoDB
    """
    c = connection.execute("SHOW CREATE TABLE " + table.fullname, {})
    desc_fetched = c.fetchone()[1]

    # this can come back as unicode if use_unicode=1 in the mysql connection
    if type(desc_fetched) is unicode:
        desc_fetched = str(desc_fetched)
    elif type(desc_fetched) is not str:
        # may get an array.array object here, depending on version
        # (such as mysql 4.1.14 vs. 4.1.11)
        desc_fetched = desc_fetched.tostring()
    desc = desc_fetched.strip()

    tabletype = ''
    lastparen = re.search(r'\)[^\)]*\Z', desc)
    if lastparen:
        match = re.search(r'\b(?:TYPE|ENGINE)=(?P<ttype>.+)\b',
                          desc[lastparen.start():], re.I)
        if match:
            tabletype = match.group('ttype')

    fkpat = (r'''CONSTRAINT [`"'](?P<name>.+?)[`"'] '''
             r'''FOREIGN KEY \((?P<columns>.+?)\) '''
             r'''REFERENCES [`"'](?P<reftable>.+?)[`"'] '''
             r'''\((?P<refcols>.+?)\)''')
    for match in re.finditer(fkpat, desc):
        columns = re.findall(r'''[`"'](.+?)[`"']''',
                             match.group('columns'))
        refcols = [
            match.group('reftable') + "." + x
            for x in re.findall(r'''[`"'](.+?)[`"']''',
                                match.group('refcols'))
        ]
        schema.Table(match.group('reftable'), table.metadata,
                     autoload=True, autoload_with=connection)
        constraint = schema.ForeignKeyConstraint(columns, refcols,
                                                 name=match.group('name'))
        table.append_constraint(constraint)

    return tabletype
class UserSession(Base):
    __tablename__ = "user_session"
    __table_args__ = (
        sql_schema.ForeignKeyConstraint(
            ["user_id", "community_id"], ["user.id", "user.community_id"]
        ),
        {"sqlite_autoincrement": True},
    )

    id = Column(
        sql_types.Integer,
        sql_schema.Identity(start=100, cycle=True),
        primary_key=True,
    )
    user_id = Column(sql_types.Integer, nullable=False)
    community_id = Column(sql_types.Integer, nullable=False)
    last_activity = Column(
        sql_types.DateTime, nullable=False, server_default=func.now()
    )
def generic_constraint(self, name, table_name, type_, schema=None, **kw):
    t = self.table(table_name, schema=schema)
    types = {
        'foreignkey': lambda name: sa_schema.ForeignKeyConstraint(
            [], [], name=name),
        'primary': sa_schema.PrimaryKeyConstraint,
        'unique': sa_schema.UniqueConstraint,
        'check': lambda name: sa_schema.CheckConstraint("", name=name),
        None: sa_schema.Constraint,
    }
    try:
        const = types[type_]
    except KeyError:
        raise TypeError("'type' can be one of %s" %
                        ", ".join(sorted(repr(x) for x in types)))
    else:
        const = const(name=name)

    t.append_constraint(const)
    return const
def _make_foreign_key(params, conn_table):
    tname = params["referred_table"]
    if params["referred_schema"]:
        tname = "%s.%s" % (params["referred_schema"], tname)

    options = params.get("options", {})

    const = sa_schema.ForeignKeyConstraint(
        [conn_table.c[cname] for cname in params["constrained_columns"]],
        ["%s.%s" % (tname, n) for n in params["referred_columns"]],
        onupdate=options.get("onupdate"),
        ondelete=options.get("ondelete"),
        deferrable=options.get("deferrable"),
        initially=options.get("initially"),
        name=params["name"],
    )
    # needed by 0.7
    conn_table.append_constraint(const)
    return const
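# Sketch of feeding _make_foreign_key() a dict shaped like what
# Inspector.get_foreign_keys() returns; the tables and values here are
# fabricated for illustration, and conn_table must already contain the
# constrained columns.
from sqlalchemy import MetaData, Table, Column, Integer

meta = MetaData()
user = Table('user', meta, Column('id', Integer, primary_key=True))
addr = Table('address', meta,
             Column('id', Integer, primary_key=True),
             Column('user_id', Integer))

params = {
    'name': 'fk_address_user_id',
    'constrained_columns': ['user_id'],
    'referred_schema': None,
    'referred_table': 'user',
    'referred_columns': ['id'],
    'options': {'ondelete': 'CASCADE'},
}
fk = _make_foreign_key(params, addr)   # appended to addr as a side effect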
def drop_all_objects(self, engine):
    """Drop all database objects.

    Drops all database objects remaining on the default schema of the
    given engine.

    Per-db implementations will also need to drop items specific to those
    systems, such as sequences, custom types (e.g. pg ENUM), etc.

    """
    with engine.begin() as conn:
        inspector = sqlalchemy.inspect(engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                # note that SQLite reflection does not have names
                # for foreign keys until SQLAlchemy 1.0
                if not fk['name']:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint((), (), name=fk['name'])
                )
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        if self.supports_drop_fk:
            for fkc in all_fks:
                conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))

        self.drop_additional_objects(conn)
def drop_all_objects(engine):
    """Drop all database objects.

    Drops all database objects remaining on the default schema of the
    given engine.

    Per-db implementations will also need to drop items specific to those
    systems, such as sequences, custom types (e.g. pg ENUM), etc.

    """
    with engine.begin() as conn:
        inspector = sa.inspect(engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk["name"]:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint((), (), name=fk["name"]))
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        if engine.name != "sqlite":
            for fkc in all_fks:
                conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))

        if engine.name == "postgresql":
            if compat_utils.sqla_100:
                enums = [e["name"] for e in sa.inspect(conn).get_enums()]
            else:
                enums = conn.dialect._load_enums(conn).keys()
            for e in enums:
                conn.execute("DROP TYPE %s" % e)
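# Hypothetical call site for drop_all_objects(); the connection URL is
# illustrative. All FK constraints are dropped before any table so the
# subsequent DROP TABLE statements can run in arbitrary order.
import sqlalchemy as sa

engine = sa.create_engine("postgresql://localhost/testdb")
drop_all_objects(engine)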
def reflecttable(self, connection, table):
    import sqlalchemy.databases.information_schema as ischema

    # Get base columns
    if table.schema is not None:
        current_schema = table.schema
    else:
        current_schema = self.get_default_schema_name()

    columns = self.uppercase_table(ischema.columns)
    s = sql.select([columns],
                   current_schema
                   and sql.and_(columns.c.table_name == table.name,
                                columns.c.table_schema == current_schema)
                   or columns.c.table_name == table.name,
                   order_by=[columns.c.ordinal_position])
    c = connection.execute(s)
    found_table = False
    while True:
        row = c.fetchone()
        if row is None:
            break
        found_table = True
        (name, type, nullable, charlen, numericprec, numericscale,
         default) = (
            row[columns.c.column_name],
            row[columns.c.data_type],
            row[columns.c.is_nullable] == 'YES',
            row[columns.c.character_maximum_length],
            row[columns.c.numeric_precision],
            row[columns.c.numeric_scale],
            row[columns.c.column_default])

        args = []
        for a in (charlen, numericprec, numericscale):
            if a is not None:
                args.append(a)
        coltype = ischema_names[type]
        coltype = coltype(*args)
        colargs = []
        if default is not None:
            colargs.append(schema.PassiveDefault(sql.text(default)))
        table.append_column(
            schema.Column(name, coltype, nullable=nullable, *colargs))

    if not found_table:
        raise exceptions.NoSuchTableError(table.name)

    # We also run an sp_columns to check for identity columns:
    # FIXME: note that this only fetches the existence of an identity
    # column, not its properties like (seed, increment); also, add a
    # check to make sure we specify the schema name of the table
    # cursor = table.engine.execute("sp_columns " + table.name, {})
    cursor = connection.execute("sp_columns " + table.name)
    while True:
        row = cursor.fetchone()
        if row is None:
            break
        col_name, type_name = row[3], row[5]
        if type_name.endswith("identity"):
            ic = table.c[col_name]
            # set up a pseudo-sequence to represent the identity attribute -
            # we interpret this at table.create() time as the identity
            # attribute
            ic.sequence = schema.Sequence(ic.name + '_identity')

    # Add constraints
    RR = self.uppercase_table(ischema.ref_constraints)  # information_schema.referential_constraints
    TC = self.uppercase_table(ischema.constraints)      # information_schema.table_constraints
    C = self.uppercase_table(ischema.column_constraints).alias('C')  # information_schema.constraint_column_usage: the constrained column
    R = self.uppercase_table(ischema.column_constraints).alias('R')  # information_schema.constraint_column_usage: the referenced column

    # Primary key constraints
    s = sql.select([C.c.column_name, TC.c.constraint_type],
                   sql.and_(TC.c.constraint_name == C.c.constraint_name,
                            C.c.table_name == table.name))
    c = connection.execute(s)
    for row in c:
        if 'PRIMARY' in row[TC.c.constraint_type.name]:
            table.primary_key.add(table.c[row[0]])

    # Foreign key constraints
    s = sql.select([C.c.column_name,
                    R.c.table_schema, R.c.table_name, R.c.column_name,
                    RR.c.constraint_name, RR.c.match_option,
                    RR.c.update_rule, RR.c.delete_rule],
                   sql.and_(C.c.table_name == table.name,
                            C.c.constraint_name == RR.c.constraint_name,
                            R.c.constraint_name ==
                            RR.c.unique_constraint_name),
                   order_by=[RR.c.constraint_name])
    rows = connection.execute(s).fetchall()

    # group rows by constraint ID, to handle multi-column FKs
    fknm, scols, rcols = (None, [], [])
    for r in rows:
        scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r
        if rfknm != fknm:
            if fknm:
                table.append_constraint(
                    schema.ForeignKeyConstraint(
                        scols,
                        ['%s.%s' % (t, c) for (s, t, c) in rcols],
                        fknm))
            fknm, scols, rcols = (rfknm, [], [])
        if not scol in scols:
            scols.append(scol)
        if not (rschema, rtbl, rcol) in rcols:
            rcols.append((rschema, rtbl, rcol))

    if fknm and scols:
        table.append_constraint(
            schema.ForeignKeyConstraint(
                scols,
                ['%s.%s' % (t, c) for (s, t, c) in rcols],
                fknm))
def reflecttable(self, connection, table, include_columns):
    import sqlalchemy.databases.information_schema as ischema

    # Get base columns
    if table.schema is not None:
        current_schema = table.schema
    else:
        current_schema = self.get_default_schema_name(connection)

    columns = self.uppercase_table(ischema.columns)
    s = sql.select([columns],
                   current_schema
                   and sql.and_(columns.c.table_name == table.name,
                                columns.c.table_schema == current_schema)
                   or columns.c.table_name == table.name,
                   order_by=[columns.c.ordinal_position])
    c = connection.execute(s)
    found_table = False
    while True:
        row = c.fetchone()
        if row is None:
            break
        found_table = True
        (name, type, nullable, charlen, numericprec, numericscale,
         default) = (
            row[columns.c.column_name],
            row[columns.c.data_type],
            row[columns.c.is_nullable] == 'YES',
            row[columns.c.character_maximum_length],
            row[columns.c.numeric_precision],
            row[columns.c.numeric_scale],
            row[columns.c.column_default]
        )
        if include_columns and name not in include_columns:
            continue

        args = []
        for a in (charlen, numericprec, numericscale):
            if a is not None:
                args.append(a)
        coltype = self.ischema_names.get(type, None)
        if coltype == MSString and charlen == -1:
            coltype = MSText()
        else:
            if coltype is None:
                util.warn("Did not recognize type '%s' of column '%s'"
                          % (type, name))
                coltype = sqltypes.NULLTYPE
            elif coltype in (MSNVarchar, AdoMSNVarchar) and charlen == -1:
                args[0] = None
            coltype = coltype(*args)
        colargs = []
        if default is not None:
            colargs.append(schema.PassiveDefault(sql.text(default)))
        table.append_column(
            schema.Column(name, coltype, nullable=nullable,
                          autoincrement=False, *colargs))

    if not found_table:
        raise exceptions.NoSuchTableError(table.name)

    # We also run an sp_columns to check for identity columns:
    cursor = connection.execute(
        "sp_columns @table_name = '%s', @table_owner = '%s'"
        % (table.name, current_schema))
    ic = None
    while True:
        row = cursor.fetchone()
        if row is None:
            break
        col_name, type_name = row[3], row[5]
        if type_name.endswith("identity"):
            ic = table.c[col_name]
            ic.autoincrement = True
            # set up a pseudo-sequence to represent the identity attribute -
            # we interpret this at table.create() time as the identity
            # attribute
            ic.sequence = schema.Sequence(ic.name + '_identity')
            # MSSQL: only one identity per table allowed
            cursor.close()
            break
    if not ic is None:
        try:
            cursor = connection.execute(
                "select ident_seed(?), ident_incr(?)",
                table.fullname, table.fullname)
            row = cursor.fetchone()
            cursor.close()
            if not row is None:
                ic.sequence.start = int(row[0])
                ic.sequence.increment = int(row[1])
        except:
            # ignoring it, works just like before
            pass

    # Add constraints
    RR = self.uppercase_table(ischema.ref_constraints)  # information_schema.referential_constraints
    TC = self.uppercase_table(ischema.constraints)      # information_schema.table_constraints
    C = self.uppercase_table(ischema.pg_key_constraints).alias('C')  # information_schema.constraint_column_usage: the constrained column
    R = self.uppercase_table(ischema.pg_key_constraints).alias('R')  # information_schema.constraint_column_usage: the referenced column

    # Primary key constraints
    s = sql.select([C.c.column_name, TC.c.constraint_type],
                   sql.and_(TC.c.constraint_name == C.c.constraint_name,
                            C.c.table_name == table.name))
    c = connection.execute(s)
    for row in c:
        if 'PRIMARY' in row[TC.c.constraint_type.name]:
            table.primary_key.add(table.c[row[0]])

    # Foreign key constraints
    s = sql.select([C.c.column_name,
                    R.c.table_schema, R.c.table_name, R.c.column_name,
                    RR.c.constraint_name, RR.c.match_option,
                    RR.c.update_rule, RR.c.delete_rule],
                   sql.and_(C.c.table_name == table.name,
                            C.c.table_schema == (table.schema
                                                 or current_schema),
                            C.c.constraint_name == RR.c.constraint_name,
                            R.c.constraint_name ==
                            RR.c.unique_constraint_name,
                            C.c.ordinal_position == R.c.ordinal_position),
                   order_by=[RR.c.constraint_name, R.c.ordinal_position])
    rows = connection.execute(s).fetchall()

    def _gen_fkref(table, rschema, rtbl, rcol):
        if table.schema and rschema != table.schema \
                or rschema != current_schema:
            return '.'.join([rschema, rtbl, rcol])
        else:
            return '.'.join([rtbl, rcol])

    # group rows by constraint ID, to handle multi-column FKs
    fknm, scols, rcols = (None, [], [])
    for r in rows:
        scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r

        if table.schema and rschema != table.schema \
                or rschema != current_schema:
            schema.Table(rtbl, table.metadata, schema=rschema,
                         autoload=True, autoload_with=connection)
        else:
            schema.Table(rtbl, table.metadata,
                         autoload=True, autoload_with=connection)

        if rfknm != fknm:
            if fknm:
                table.append_constraint(
                    schema.ForeignKeyConstraint(
                        scols,
                        [_gen_fkref(table, s, t, c) for s, t, c in rcols],
                        fknm))
            fknm, scols, rcols = (rfknm, [], [])
        if not scol in scols:
            scols.append(scol)
        if not (rschema, rtbl, rcol) in rcols:
            rcols.append((rschema, rtbl, rcol))

    if fknm and scols:
        table.append_constraint(
            schema.ForeignKeyConstraint(
                scols,
                [_gen_fkref(table, s, t, c) for s, t, c in rcols],
                fknm))
def reflecttable(self, connection, table):
    # TODO: map these better
    column_func = {
        14: lambda r: sqltypes.String(r['FLEN']),   # TEXT
        7: lambda r: sqltypes.Integer(),            # SHORT
        8: lambda r: sqltypes.Integer(),            # LONG
        9: lambda r: sqltypes.Float(),              # QUAD
        10: lambda r: sqltypes.Float(),             # FLOAT
        27: lambda r: sqltypes.Float(),             # DOUBLE
        35: lambda r: sqltypes.DateTime(),          # TIMESTAMP
        37: lambda r: sqltypes.String(r['FLEN']),   # VARYING
        261: lambda r: sqltypes.TEXT(),             # BLOB
        40: lambda r: sqltypes.Char(r['FLEN']),     # CSTRING
        12: lambda r: sqltypes.Date(),              # DATE
        13: lambda r: sqltypes.Time(),              # TIME
        16: lambda r: sqltypes.Numeric(precision=r['FPREC'],
                                       length=r['FSCALE'] * -1),  # INT64
    }
    tblqry = """
    SELECT DISTINCT R.RDB$FIELD_NAME AS FNAME,
                    R.RDB$NULL_FLAG AS NULL_FLAG,
                    R.RDB$FIELD_POSITION,
                    F.RDB$FIELD_TYPE AS FTYPE,
                    F.RDB$FIELD_SUB_TYPE AS STYPE,
                    F.RDB$FIELD_LENGTH AS FLEN,
                    F.RDB$FIELD_PRECISION AS FPREC,
                    F.RDB$FIELD_SCALE AS FSCALE
    FROM RDB$RELATION_FIELDS R
         JOIN RDB$FIELDS F ON R.RDB$FIELD_SOURCE=F.RDB$FIELD_NAME
    WHERE F.RDB$SYSTEM_FLAG=0 and R.RDB$RELATION_NAME=?
    ORDER BY R.RDB$FIELD_POSITION"""
    keyqry = """
    SELECT SE.RDB$FIELD_NAME SENAME
    FROM RDB$RELATION_CONSTRAINTS RC
         JOIN RDB$INDEX_SEGMENTS SE ON RC.RDB$INDEX_NAME=SE.RDB$INDEX_NAME
    WHERE RC.RDB$CONSTRAINT_TYPE=? AND RC.RDB$RELATION_NAME=?"""
    fkqry = """
    SELECT RC.RDB$CONSTRAINT_NAME CNAME,
           CSE.RDB$FIELD_NAME FNAME,
           IX2.RDB$RELATION_NAME RNAME,
           SE.RDB$FIELD_NAME SENAME
    FROM RDB$RELATION_CONSTRAINTS RC
         JOIN RDB$INDICES IX1 ON IX1.RDB$INDEX_NAME=RC.RDB$INDEX_NAME
         JOIN RDB$INDICES IX2 ON IX2.RDB$INDEX_NAME=IX1.RDB$FOREIGN_KEY
         JOIN RDB$INDEX_SEGMENTS CSE ON CSE.RDB$INDEX_NAME=IX1.RDB$INDEX_NAME
         JOIN RDB$INDEX_SEGMENTS SE
           ON SE.RDB$INDEX_NAME=IX2.RDB$INDEX_NAME
          AND SE.RDB$FIELD_POSITION=CSE.RDB$FIELD_POSITION
    WHERE RC.RDB$CONSTRAINT_TYPE=? AND RC.RDB$RELATION_NAME=?
    ORDER BY SE.RDB$INDEX_NAME, SE.RDB$FIELD_POSITION"""

    # get primary key fields
    c = connection.execute(keyqry, ["PRIMARY KEY", table.name.upper()])
    pkfields = [r['SENAME'] for r in c.fetchall()]

    # get all of the fields for this table
    def lower_if_possible(name):
        # Remove trailing spaces: FB uses a CHAR() type,
        # that is padded with spaces
        name = name.rstrip()
        # If it's composed only of upper-case chars, use
        # the lowered version, otherwise keep the original
        # (even if stripped...)
        lname = name.lower()
        if lname.upper() == name and not ' ' in name:
            return lname
        return name

    c = connection.execute(tblqry, [table.name.upper()])
    row = c.fetchone()
    if not row:
        raise exceptions.NoSuchTableError(table.name)

    while row:
        name = row['FNAME']
        args = [lower_if_possible(name)]
        kw = {}
        # get the data types and lengths
        args.append(column_func[row['FTYPE']](row))
        # is it a primary key?
        kw['primary_key'] = name in pkfields
        table.append_column(schema.Column(*args, **kw))
        row = c.fetchone()

    # get the foreign keys
    c = connection.execute(fkqry, ["FOREIGN KEY", table.name.upper()])
    fks = {}
    while True:
        row = c.fetchone()
        if not row:
            break
        cname = lower_if_possible(row['CNAME'])
        try:
            fk = fks[cname]
        except KeyError:
            fks[cname] = fk = ([], [])
        rname = lower_if_possible(row['RNAME'])
        schema.Table(rname, table.metadata,
                     autoload=True, autoload_with=connection)
        fname = lower_if_possible(row['FNAME'])
        refspec = rname + '.' + lower_if_possible(row['SENAME'])
        fk[0].append(fname)
        fk[1].append(refspec)

    for name, value in fks.iteritems():
        table.append_constraint(
            schema.ForeignKeyConstraint(value[0], value[1], name=name))
def reflecttable(self, connection, table, include_columns):
    # Query to extract the details of all the fields of the given table
    tblqry = """
    SELECT DISTINCT r.rdb$field_name AS fname,
                    r.rdb$null_flag AS null_flag,
                    t.rdb$type_name AS ftype,
                    f.rdb$field_sub_type AS stype,
                    f.rdb$field_length AS flen,
                    f.rdb$field_precision AS fprec,
                    f.rdb$field_scale AS fscale,
                    COALESCE(r.rdb$default_source,
                             f.rdb$default_source) AS fdefault
    FROM rdb$relation_fields r
         JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name
         JOIN rdb$types t
           ON t.rdb$type=f.rdb$field_type
          AND t.rdb$field_name='RDB$FIELD_TYPE'
    WHERE f.rdb$system_flag=0 AND r.rdb$relation_name=?
    ORDER BY r.rdb$field_position
    """
    # Query to extract the PK/FK constrained fields of the given table
    keyqry = """
    SELECT se.rdb$field_name AS fname
    FROM rdb$relation_constraints rc
         JOIN rdb$index_segments se ON rc.rdb$index_name=se.rdb$index_name
    WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
    """
    # Query to extract the details of each UK/FK of the given table
    fkqry = """
    SELECT rc.rdb$constraint_name AS cname,
           cse.rdb$field_name AS fname,
           ix2.rdb$relation_name AS targetrname,
           se.rdb$field_name AS targetfname
    FROM rdb$relation_constraints rc
         JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name
         JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key
         JOIN rdb$index_segments cse
           ON cse.rdb$index_name=ix1.rdb$index_name
         JOIN rdb$index_segments se
           ON se.rdb$index_name=ix2.rdb$index_name
          AND se.rdb$field_position=cse.rdb$field_position
    WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
    ORDER BY se.rdb$index_name, se.rdb$field_position
    """
    # Heuristic-query to determine the generator associated to a PK field
    genqry = """
    SELECT trigdep.rdb$depended_on_name AS fgenerator
    FROM rdb$dependencies tabdep
         JOIN rdb$dependencies trigdep
           ON (tabdep.rdb$dependent_name=trigdep.rdb$dependent_name
               AND trigdep.rdb$depended_on_type=14
               AND trigdep.rdb$dependent_type=2)
         JOIN rdb$triggers trig
           ON (trig.rdb$trigger_name=tabdep.rdb$dependent_name)
    WHERE tabdep.rdb$depended_on_name=?
      AND tabdep.rdb$depended_on_type=0
      AND trig.rdb$trigger_type=1
      AND tabdep.rdb$field_name=?
      AND (SELECT count(*)
           FROM rdb$dependencies trigdep2
           WHERE trigdep2.rdb$dependent_name =
                 trigdep.rdb$dependent_name) = 2
    """

    tablename = self._denormalize_name(table.name)

    # get primary key fields
    c = connection.execute(keyqry, ["PRIMARY KEY", tablename])
    pkfields = [self._normalize_name(r['fname']) for r in c.fetchall()]

    # get all of the fields for this table
    c = connection.execute(tblqry, [tablename])

    found_table = False
    while True:
        row = c.fetchone()
        if row is None:
            break
        found_table = True

        name = self._normalize_name(row['fname'])
        if include_columns and name not in include_columns:
            continue
        args = [name]

        kw = {}
        # get the data type
        coltype = ischema_names.get(row['ftype'].rstrip())
        if coltype is None:
            util.warn("Did not recognize type '%s' of column '%s'"
                      % (str(row['ftype']), name))
            coltype = sqltypes.NULLTYPE
        else:
            coltype = coltype(row)
        args.append(coltype)

        # is it a primary key?
        kw['primary_key'] = name in pkfields

        # is it nullable?
        kw['nullable'] = not bool(row['null_flag'])

        # does it have a default value?
        if row['fdefault'] is not None:
            # the value comes down as "DEFAULT 'value'"
            assert row['fdefault'].upper().startswith('DEFAULT '), row
            defvalue = row['fdefault'][8:]
            args.append(schema.DefaultClause(sql.text(defvalue)))

        col = schema.Column(*args, **kw)
        if kw['primary_key']:
            # if the PK is a single field, try to see if it's linked to
            # a sequence through a trigger
            if len(pkfields) == 1:
                genc = connection.execute(genqry, [tablename, row['fname']])
                genr = genc.fetchone()
                if genr is not None:
                    col.sequence = schema.Sequence(
                        self._normalize_name(genr['fgenerator']))

        table.append_column(col)

    if not found_table:
        raise exc.NoSuchTableError(table.name)

    # get the foreign keys
    c = connection.execute(fkqry, ["FOREIGN KEY", tablename])
    fks = {}
    while True:
        row = c.fetchone()
        if not row:
            break

        cname = self._normalize_name(row['cname'])
        try:
            fk = fks[cname]
        except KeyError:
            fks[cname] = fk = ([], [])
        rname = self._normalize_name(row['targetrname'])
        schema.Table(rname, table.metadata,
                     autoload=True, autoload_with=connection)
        fname = self._normalize_name(row['fname'])
        refspec = rname + '.' + self._normalize_name(row['targetfname'])
        fk[0].append(fname)
        fk[1].append(refspec)

    for name, value in fks.iteritems():
        table.append_constraint(
            schema.ForeignKeyConstraint(value[0], value[1], name=name,
                                        link_to_name=True))
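# Toy illustration of the (columns, refspecs) accumulation the reflection
# code above uses to group multi-column FKs by constraint name; the rows
# are fabricated stand-ins for the fkqry result set.
rows = [
    {'cname': 'FK_CHILD_PARENT', 'fname': 'PARENT_A',
     'targetrname': 'PARENT', 'targetfname': 'A'},
    {'cname': 'FK_CHILD_PARENT', 'fname': 'PARENT_B',
     'targetrname': 'PARENT', 'targetfname': 'B'},
]
fks = {}
for row in rows:
    fk = fks.setdefault(row['cname'], ([], []))
    fk[0].append(row['fname'].lower())
    fk[1].append('%s.%s' % (row['targetrname'].lower(),
                            row['targetfname'].lower()))
# fks == {'FK_CHILD_PARENT': (['parent_a', 'parent_b'],
#                             ['parent.a', 'parent.b'])}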