def create_objective_summary_table(table_name, col_names):
    columns = [schema.Column(cn, types.Float, index=True) for cn in col_names]
    table = schema.Table(
        table_name, database.global_state.metadata,
        schema.Column('objective_id', types.Integer,
                      schema.ForeignKey('objective.id'), primary_key=True),
        schema.Column('value', types.Float, index=True),
        *columns)
    table.create()
    return table
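
# Hedged usage sketch for the pattern above (not from the original project):
# the summary table keys each row to a parent 'objective' row and adds one
# indexed Float column per requested name. The engine, metadata, and column
# names here are illustrative assumptions.
from sqlalchemy import create_engine, schema, types

_engine = create_engine('sqlite:///:memory:')
_metadata = schema.MetaData()
schema.Table('objective', _metadata,
             schema.Column('id', types.Integer, primary_key=True))
_summary = schema.Table(
    'objective_summary', _metadata,
    schema.Column('objective_id', types.Integer,
                  schema.ForeignKey('objective.id'), primary_key=True),
    schema.Column('value', types.Float, index=True),
    *[schema.Column(cn, types.Float, index=True)
      for cn in ('mean', 'stddev')])
_metadata.create_all(_engine)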
def _ensure_table_for_fk(self, metadata, fk):
    """Create a placeholder Table object for the referent of a ForeignKey."""
    if isinstance(fk._colspec, string_types):
        table_key, cname = fk._colspec.rsplit('.', 1)
        sname, tname = self._parse_table_key(table_key)
        if table_key not in metadata.tables:
            rel_t = sa_schema.Table(tname, metadata, schema=sname)
        else:
            rel_t = metadata.tables[table_key]
        if cname not in rel_t.c:
            rel_t.append_column(sa_schema.Column(cname, NULLTYPE))
def setUp(self):
    super(RetainSchemaTest, self).setUp()

    metadata = schema.MetaData()
    self.test_table = schema.Table(
        'test_table', metadata,
        schema.Column('x', types.Integer),
        schema.Column('y', types.Integer),
        mysql_engine='InnoDB')

    def gen_schema(engine):
        metadata.create_all(engine, checkfirst=False)

    self._gen_schema = gen_schema
def define_image_tags_table(meta):
    # Load the images table so the foreign key can be set up properly
    schema.Table('images', meta, autoload=True)

    image_tags = schema.Table(
        'image_tags', meta,
        schema.Column('id', glance_schema.Integer(), primary_key=True,
                      nullable=False),
        schema.Column('image_id', glance_schema.String(36),
                      schema.ForeignKey('images.id'), nullable=False),
        schema.Column('value', glance_schema.String(255), nullable=False),
        schema.Column('created_at', glance_schema.DateTime(), nullable=False),
        schema.Column('updated_at', glance_schema.DateTime()),
        schema.Column('deleted_at', glance_schema.DateTime()),
        schema.Column('deleted', glance_schema.Boolean(), nullable=False,
                      default=False),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    schema.Index('ix_image_tags_image_id', image_tags.c.image_id)
    schema.Index('ix_image_tags_image_id_tag_value',
                 image_tags.c.image_id, image_tags.c.value)

    return image_tags
def check_constraint(
    self,
    name: Optional[str],
    source: str,
    condition: Union["TextClause", "ColumnElement[Any]"],
    schema: Optional[str] = None,
    **kw
) -> "CheckConstraint":
    t = sa_schema.Table(
        source,
        self.metadata(),
        sa_schema.Column("x", Integer),
        schema=schema,
    )
    ck = sa_schema.CheckConstraint(condition, name=name, **kw)
    t.append_constraint(ck)
    return ck
def _foreign_key_constraint(self, name, source, referent,
                            local_cols, remote_cols,
                            onupdate=None, ondelete=None,
                            source_schema=None, referent_schema=None):
    m = sa_schema.MetaData()
    if source == referent:
        t1_cols = local_cols + remote_cols
    else:
        t1_cols = local_cols
        sa_schema.Table(
            referent, m,
            *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
            schema=referent_schema)

    t1 = sa_schema.Table(
        source, m,
        *[sa_schema.Column(n, NULLTYPE) for n in t1_cols],
        schema=source_schema)

    tname = "%s.%s" % (referent_schema, referent) if referent_schema \
        else referent
    f = sa_schema.ForeignKeyConstraint(
        local_cols,
        ["%s.%s" % (tname, n) for n in remote_cols],
        name=name,
        onupdate=onupdate,
        ondelete=ondelete)
    t1.append_constraint(f)

    return f
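
# Hedged sketch of the stub-table technique used above (table and column
# names are illustrative): columns get NullType because only their names
# matter for rendering the foreign-key DDL, not their real database types.
from sqlalchemy import MetaData
from sqlalchemy import schema as sa_schema
from sqlalchemy.types import NullType

_m = MetaData()
sa_schema.Table('parent', _m, sa_schema.Column('id', NullType()))
_child = sa_schema.Table('child', _m,
                         sa_schema.Column('parent_id', NullType()))
_fk = sa_schema.ForeignKeyConstraint(
    ['parent_id'], ['parent.id'],
    name='fk_child_parent', ondelete='CASCADE')
_child.append_constraint(_fk)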
def _prep_testing_database(options, file_config):
    from sqlalchemy.testing import engines
    from sqlalchemy import schema, inspect

    # also create alt schemas etc. here?
    if options.dropfirst:
        e = engines.utf8_engine()
        inspector = inspect(e)

        try:
            view_names = inspector.get_view_names()
        except NotImplementedError:
            pass
        else:
            for vname in view_names:
                e.execute(schema._DropView(
                    schema.Table(vname, schema.MetaData())))

        try:
            view_names = inspector.get_view_names(schema="test_schema")
        except NotImplementedError:
            pass
        else:
            for vname in view_names:
                e.execute(schema._DropView(
                    schema.Table(vname, schema.MetaData(),
                                 schema="test_schema")))

        for tname in reversed(
                inspector.get_table_names(order_by="foreign_key")):
            e.execute(schema.DropTable(
                schema.Table(tname, schema.MetaData())))

        for tname in reversed(
                inspector.get_table_names(order_by="foreign_key",
                                          schema="test_schema")):
            e.execute(schema.DropTable(
                schema.Table(tname, schema.MetaData(),
                             schema="test_schema")))

        e.dispose()
def primary_key_constraint(
    self,
    name: Optional[str],
    table_name: str,
    cols: Sequence[str],
    schema: Optional[str] = None,
    **dialect_kw
) -> "PrimaryKeyConstraint":
    m = self.metadata()
    columns = [sa_schema.Column(n, NULLTYPE) for n in cols]
    t = sa_schema.Table(table_name, m, *columns, schema=schema)
    p = sa_schema.PrimaryKeyConstraint(
        *[t.c[n] for n in cols], name=name, **dialect_kw
    )
    return p
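
# Hedged sketch of building a standalone PrimaryKeyConstraint against a stub
# table, as the helper above does (table and column names are assumptions):
from sqlalchemy import MetaData
from sqlalchemy import schema as sa_schema
from sqlalchemy.types import NullType

_m = MetaData()
_t = sa_schema.Table('user_account', _m,
                     sa_schema.Column('id', NullType()),
                     sa_schema.Column('tenant_id', NullType()))
_pk = sa_schema.PrimaryKeyConstraint(_t.c.id, _t.c.tenant_id,
                                     name='pk_user_account')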
def upgrade(migrate_engine):
    meta.bind = migrate_engine

    stats = schema.Table(
        'apistats', meta,
        schema.Column('id', types.Integer(), primary_key=True),
        schema.Column('host', types.String(80)),
        schema.Column('request_count', types.BigInteger()),
        schema.Column('error_count', types.BigInteger()),
        schema.Column('average_response_time', types.Float()),
        schema.Column('requests_per_tenant', types.Text()),
        schema.Column('requests_per_second', types.Float()),
        schema.Column('errors_per_second', types.Float()),
        schema.Column('created', types.DateTime, nullable=False),
        schema.Column('updated', types.DateTime, nullable=False))
    stats.create()
def moretableinfo(self, connection, table):
    """Return (tabletype, {colname:foreignkey,...})

    execute(SHOW CREATE TABLE child) =>
    CREATE TABLE `child` (
      `id` int(11) default NULL,
      `parent_id` int(11) default NULL,
      KEY `par_ind` (`parent_id`),
      CONSTRAINT `child_ibfk_1` FOREIGN KEY (`parent_id`)
        REFERENCES `parent` (`id`) ON DELETE CASCADE
    ) TYPE=InnoDB
    """
    c = connection.execute("SHOW CREATE TABLE " + table.fullname, {})
    desc_fetched = c.fetchone()[1]

    # this can come back as unicode if use_unicode=1 in the mysql connection
    if type(desc_fetched) is unicode:
        desc_fetched = str(desc_fetched)
    elif type(desc_fetched) is not str:
        # may get an array.array object here, depending on version
        # (such as mysql 4.1.14 vs. 4.1.11)
        desc_fetched = desc_fetched.tostring()
    desc = desc_fetched.strip()

    tabletype = ''
    lastparen = re.search(r'\)[^\)]*\Z', desc)
    if lastparen:
        match = re.search(r'\b(?:TYPE|ENGINE)=(?P<ttype>.+)\b',
                          desc[lastparen.start():], re.I)
        if match:
            tabletype = match.group('ttype')

    fkpat = r'''CONSTRAINT [`"'](?P<name>.+?)[`"'] FOREIGN KEY \((?P<columns>.+?)\) REFERENCES [`"'](?P<reftable>.+?)[`"'] \((?P<refcols>.+?)\)'''
    for match in re.finditer(fkpat, desc):
        columns = re.findall(r'''[`"'](.+?)[`"']''', match.group('columns'))
        refcols = [match.group('reftable') + "." + x
                   for x in re.findall(r'''[`"'](.+?)[`"']''',
                                       match.group('refcols'))]
        schema.Table(match.group('reftable'), table.metadata,
                     autoload=True, autoload_with=connection)
        constraint = schema.ForeignKeyConstraint(columns, refcols,
                                                 name=match.group('name'))
        table.append_constraint(constraint)

    return tabletype
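
# Hedged, runnable check of the FK-parsing regex above, applied to the
# sample DDL from the docstring (the printed values follow from that sample):
import re

_desc = ("CREATE TABLE `child` (`id` int(11) default NULL, "
         "`parent_id` int(11) default NULL, KEY `par_ind` (`parent_id`), "
         "CONSTRAINT `child_ibfk_1` FOREIGN KEY (`parent_id`) "
         "REFERENCES `parent` (`id`) ON DELETE CASCADE) TYPE=InnoDB")
_fkpat = r'''CONSTRAINT [`"'](?P<name>.+?)[`"'] FOREIGN KEY \((?P<columns>.+?)\) REFERENCES [`"'](?P<reftable>.+?)[`"'] \((?P<refcols>.+?)\)'''
for _m in re.finditer(_fkpat, _desc):
    # prints: child_ibfk_1 `parent_id` parent
    print(_m.group('name'), _m.group('columns'), _m.group('reftable'))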
def Table(*args, **kw):
    """A schema.Table wrapper/hook for dialect-specific tweaks."""
    test_opts = dict([(k, kw.pop(k)) for k in kw.keys()
                      if k.startswith('test_')])

    kw.update(table_options)

    if testing.against('mysql'):
        if 'mysql_engine' not in kw and 'mysql_type' not in kw:
            if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
                kw['mysql_engine'] = 'InnoDB'
            else:
                kw['mysql_engine'] = 'MyISAM'

    # Apply some default cascading rules for self-referential foreign keys.
    # MySQL InnoDB has some issues around selecting self-refs too.
    if testing.against('firebird'):
        table_name = args[0]
        unpack = (testing.config.db.dialect.
                  identifier_preparer.unformat_identifiers)

        # Only going after ForeignKeys in Columns.  May need to
        # expand to ForeignKeyConstraint too.
        fks = [fk
               for col in args if isinstance(col, schema.Column)
               for fk in col.foreign_keys]

        for fk in fks:
            # root around in raw spec
            ref = fk._colspec
            if isinstance(ref, schema.Column):
                name = ref.table.name
            else:
                # take just the table name: on FB there cannot be
                # a schema, so the first element is always the
                # table name, possibly followed by the field name
                name = unpack(ref)[0]
            if name == table_name:
                if fk.ondelete is None:
                    fk.ondelete = 'CASCADE'
                if fk.onupdate is None:
                    fk.onupdate = 'CASCADE'

    return schema.Table(*args, **kw)
def unique_constraint(self, name: Optional[str], source: str,
                      local_cols: Sequence[str],
                      schema: Optional[str] = None,
                      **kw) -> "UniqueConstraint":
    t = sa_schema.Table(
        source,
        self.metadata(),
        *[sa_schema.Column(n, NULLTYPE) for n in local_cols],
        schema=schema)
    kw["name"] = name
    uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw)
    # TODO: need event tests to ensure the event
    # is fired off here
    t.append_constraint(uq)
    return uq
def devicerealtime(self):
    if 'item' in request.params:
        ditem = request.params['item']
    else:
        ditem = 'loadavg'
    if 'hn' in request.params:
        dname = request.params['hn']
    else:
        dname = ''
    c.ditem = ditem
    c.dname = ''
    d = model.Devices
    if dname:
        rows = model.meta.Session.query(d).filter(
            and_(d.ditem.like('%' + ditem + '%'),
                 d.dname.like('%' + dname + '%'),
                 d.max_value > 0)).order_by(d.rank.desc())
    else:
        rows = model.meta.Session.query(d).filter(
            and_(d.ditem.like('%' + ditem + '%'),
                 d.max_value > 0)).order_by(d.rank.desc())
    rs = rows.all()
    c.dname = dname
    c.total = rows.count()
    metadata = model.meta.Base.metadata
    metadata.bind = model.meta.Session.bind
    d = str(datetime.now().date())
    table = ''.join(d.split('-'))
    table = 'device_realtime_' + table
    flag = 1
    c.rtime = ''
    import time
    c.reflash = ''.join(str(time.time()).split('.'))
    dt = int((datetime.now() - timedelta(minutes=15)).strftime('%H%M'))
    for x, t in enumerate(rs):
        r = schema.Table(table, metadata, autoload=True,
                         include_columns=['id', 'did', 'dthour', 'dtvalue'],
                         useexisting=True)
        result = model.meta.Session.query(r).filter(
            and_(r.c.did == t.id, r.c.dthour >= dt)).order_by(
            r.c.dthour.desc()).first()
        if result:
            while flag:
                if len(str(result.dthour)) < 4:
                    c.rtime = (str(result.dthour)[0:1] + ':' +
                               str(result.dthour)[1:])
                else:
                    c.rtime = (str(result.dthour)[0:2] + ':' +
                               str(result.dthour)[2:])
                flag = 0
            t.rank = result.dtvalue * 100.0 / t.max_value
            t.rank = round(t.rank, 2)
        else:
            t.rank = -1
    c.rows = sorted(rs, key=lambda d: d.rank, reverse=True)
    return render('/derived/devices/realtime.html')
def _ensure_table_for_fk(
    self, metadata: "MetaData", fk: "ForeignKey"
) -> None:
    """Create a placeholder Table object for the referent of a
    ForeignKey.

    """
    if isinstance(fk._colspec, str):  # type:ignore[attr-defined]
        table_key, cname = fk._colspec.rsplit(  # type:ignore[attr-defined]
            ".", 1
        )
        sname, tname = self._parse_table_key(table_key)
        if table_key not in metadata.tables:
            rel_t = sa_schema.Table(tname, metadata, schema=sname)
        else:
            rel_t = metadata.tables[table_key]
        if cname not in rel_t.c:
            rel_t.append_column(sa_schema.Column(cname, NULLTYPE))
def index(self, name: str, tablename: Optional[str],
          columns: Sequence[Union[str, "TextClause", "ColumnElement[Any]"]],
          schema: Optional[str] = None, **kw) -> "Index":
    t = sa_schema.Table(
        tablename or "no_table",
        self.metadata(),
        schema=schema,
    )
    kw["_table"] = t
    idx = sa_schema.Index(
        name,
        *[util.sqla_compat._textual_index_column(t, n) for n in columns],
        **kw)
    return idx
def build_mapper(self, name):
    NumOfCh = 16
    schemalist = [schema.Column('ch' + str(i), types.SMALLINT)
                  for i in range(NumOfCh)]
    table = schema.Table(
        name, self.metadata,
        schema.Column('ID', types.Integer, primary_key=True,
                      autoincrement=True),
        *schemalist)
    cls = type(name, (), {})
    orm.mapper(cls, table)
    return cls
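
# Hedged, runnable sketch of the classical-mapping pattern above
# (SQLAlchemy 1.x style, with illustrative names; under 2.0 the equivalent
# call is registry().map_imperatively):
from sqlalchemy import orm, schema, types

_metadata = schema.MetaData()
_table = schema.Table(
    'samples', _metadata,
    schema.Column('ID', types.Integer, primary_key=True,
                  autoincrement=True),
    *[schema.Column('ch%d' % i, types.SMALLINT) for i in range(16)])
Samples = type('samples', (), {})
orm.mapper(Samples, _table)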
def dropdb():
    from nailgun.db import migration
    conn = engine.connect()
    trans = conn.begin()
    meta = MetaData()
    meta.reflect(bind=engine)
    inspector = reflection.Inspector.from_engine(engine)

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(schema.ForeignKeyConstraint((), (), name=fk['name']))
        t = schema.Table(table_name, meta, *fks, extend_existing=True)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(schema.DropConstraint(fkc))

    for table in tbs:
        conn.execute(schema.DropTable(table))

    custom_types = conn.execute(
        "SELECT n.nspname as schema, t.typname as type "
        "FROM pg_type t LEFT JOIN pg_catalog.pg_namespace n "
        "ON n.oid = t.typnamespace "
        "WHERE (t.typrelid = 0 OR (SELECT c.relkind = 'c' "
        "FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid)) "
        "AND NOT EXISTS(SELECT 1 FROM pg_catalog.pg_type el "
        "WHERE el.oid = t.typelem AND el.typarray = t.oid) "
        "AND n.nspname NOT IN ('pg_catalog', 'information_schema')")

    for tp in custom_types:
        conn.execute("DROP TYPE {0}".format(tp[1]))

    trans.commit()
    migration.drop_migration_meta(engine)
    conn.close()
    engine.dispose()
def _ensure_table_for_fk(self, metadata, fk):
    """Create a placeholder Table object for the referent of a ForeignKey."""
    if isinstance(fk._colspec, basestring):
        table_key, cname = fk._colspec.rsplit('.', 1)
        if '.' in table_key:
            tokens = table_key.split('.')
            sname = ".".join(tokens[0:-1])
            tname = tokens[-1]
        else:
            tname = table_key
            sname = None
        if table_key not in metadata.tables:
            rel_t = schema.Table(tname, metadata, schema=sname)
        else:
            rel_t = metadata.tables[table_key]
        if cname not in rel_t.c:
            rel_t.append_column(schema.Column(cname, NULLTYPE))
def table(self, name, *columns, **kw):
    m = self.metadata()

    cols = [
        sqla_compat._copy(c) if c.table is not None else c
        for c in columns
        if isinstance(c, Column)
    ]

    t = sa_schema.Table(name, m, *cols, **kw)

    constraints = [
        sqla_compat._copy(elem, target_table=t)
        if getattr(elem, "parent", None) is not None
        else elem
        for elem in columns
        if isinstance(elem, (Constraint, Index))
    ]

    for const in constraints:
        t.append_constraint(const)

    for f in t.foreign_keys:
        self._ensure_table_for_fk(m, f)
    return t
def downgrade(migrate_engine):
    meta.bind = migrate_engine

    package_to_category = schema.Table('package_to_category', meta,
                                       autoload=True)
    package_to_category.drop()

    package_to_tag = schema.Table('package_to_tag', meta, autoload=True)
    package_to_tag.drop()

    class_definition = schema.Table('class_definition', meta, autoload=True)
    class_definition.drop()

    tag = schema.Table('tag', meta, autoload=True)
    tag.drop()

    category = schema.Table('category', meta, autoload=True)
    category.drop()

    package = schema.Table('package', meta, autoload=True)
    package.drop()
def realtimeview(self, id=0, dname='', ditem='', maxvalue=''):
    d = str(datetime.now().date())
    table = ''.join(d.split('-'))
    table = 'device_realtime_' + table
    metadata = model.meta.Base.metadata
    metadata.bind = model.meta.Session.bind
    dt = int((datetime.now() - timedelta(minutes=15)).strftime('%H%M'))
    try:
        t = schema.Table(table, metadata, autoload=True,
                         include_columns=['id', 'did', 'dthour', 'dtvalue'],
                         useexisting=True)
        result = model.meta.Session.query(t).filter(
            and_(t.c.did == id, t.c.dthour >= dt)).order_by(
            t.c.dthour.desc()).first()
        if result:
            curvalue = result.dtvalue
            graph = self.makeChart_Semi(curvalue=curvalue, dname=dname,
                                        ditem=ditem, maxvalue=maxvalue)
            response.headers['Content-type'] = 'image/png'
            return graph
        else:
            graph = self.makeChart_Semi(curvalue=0, dname=dname,
                                        ditem=ditem, maxvalue=maxvalue,
                                        nodata=True)
            response.headers['Content-type'] = 'image/png'
            return graph
    except NoSuchTableError:
        return 'no such table'
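
# Hedged, runnable sketch of reflecting a date-partitioned table with
# include_columns, as above (SQLAlchemy 1.x idioms; the table name, date
# suffix, and in-memory engine are assumptions for illustration):
from sqlalchemy import create_engine, schema

_engine = create_engine('sqlite:///:memory:')
_engine.execute('CREATE TABLE device_realtime_20200101 '
                '(id INTEGER PRIMARY KEY, did INTEGER, '
                'dthour INTEGER, dtvalue FLOAT)')
_metadata = schema.MetaData(bind=_engine)
_t = schema.Table('device_realtime_20200101', _metadata, autoload=True,
                  include_columns=['id', 'did', 'dthour', 'dtvalue'])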
def drop_all_objects(self, engine):
    """Drop all database objects.

    Drops all database objects remaining on the default schema of the
    given engine.

    Per-db implementations will also need to drop items specific to those
    systems, such as sequences, custom types (e.g. pg ENUM), etc.

    """
    with engine.begin() as conn:
        inspector = sqlalchemy.inspect(engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                # note that SQLite reflection does not have names
                # for foreign keys until SQLAlchemy 1.0
                if not fk['name']:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint((), (), name=fk['name'])
                )
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        if self.supports_drop_fk:
            for fkc in all_fks:
                conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))

        self.drop_additional_objects(conn)
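
# Hedged, runnable sketch of the same drop pattern against SQLite
# (engine and DDL are illustrative; the DropConstraint pass is omitted,
# mirroring the supports_drop_fk check above, since SQLite cannot drop
# constraints; SQLAlchemy 1.x Engine.execute style):
import sqlalchemy
from sqlalchemy import create_engine, schema

_engine = create_engine('sqlite:///:memory:')
_engine.execute('CREATE TABLE parent (id INTEGER PRIMARY KEY)')
_engine.execute('CREATE TABLE child '
                '(id INTEGER PRIMARY KEY, '
                'parent_id INTEGER REFERENCES parent(id))')
_inspector = sqlalchemy.inspect(_engine)
_metadata = schema.MetaData()
_tables = [schema.Table(name, _metadata)
           for name in _inspector.get_table_names()]
with _engine.begin() as _conn:
    for _table in _tables:
        _conn.execute(schema.DropTable(_table))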
def drop_all_objects(engine):
    """Drop all database objects.

    Drops all database objects remaining on the default schema of the
    given engine.

    Per-db implementations will also need to drop items specific to those
    systems, such as sequences, custom types (e.g. pg ENUM), etc.

    """
    with engine.begin() as conn:
        inspector = sa.inspect(engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk["name"]:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint((), (), name=fk["name"]))
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        if engine.name != "sqlite":
            for fkc in all_fks:
                conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))

        if engine.name == "postgresql":
            if compat_utils.sqla_100:
                enums = [e["name"] for e in sa.inspect(conn).get_enums()]
            else:
                enums = conn.dialect._load_enums(conn).keys()
            for e in enums:
                conn.execute("DROP TYPE %s" % e)
def main():
    metadata = schema.MetaData()
    page_table = schema.Table(
        'page', metadata,
        schema.Column('id', types.Integer, primary_key=True),
        schema.Column('name', types.Unicode(255), default=u''),
        schema.Column('title', types.Unicode(255), default=u'Untitled Page'),
        schema.Column('content', types.Text(), default=u''),
    )
    print(metadata.tables['page'] is page_table)
    for t in metadata.sorted_tables:
        print("Table name: ", t.name)
        print("t is page_table: ", t is page_table)
    for column in page_table.columns:
        print("Column type: ", column.type)
    engine = create_engine('sqlite:///:memory:', echo=True)
    metadata.bind = engine
    metadata.create_all(checkfirst=True)
def _cleanup(self):
    engine = self.get_engine()
    with engine.begin() as conn:
        inspector = reflection.Inspector.from_engine(engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk['name']:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint((), (), name=fk['name']))
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        for fkc in all_fks:
            conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))
def table(self, name, *columns, **kw):
    m = self.metadata()
    t = sa_schema.Table(name, m, *columns, **kw)
    for f in t.foreign_keys:
        self._ensure_table_for_fk(m, f)
    return t
def reflecttable(self, connection, table, include_columns):
    import sqlalchemy.databases.information_schema as ischema

    # Get base columns
    if table.schema is not None:
        current_schema = table.schema
    else:
        current_schema = self.get_default_schema_name(connection)

    columns = self.uppercase_table(ischema.columns)
    s = sql.select([columns],
                   current_schema
                   and sql.and_(columns.c.table_name == table.name,
                                columns.c.table_schema == current_schema)
                   or columns.c.table_name == table.name,
                   order_by=[columns.c.ordinal_position])
    c = connection.execute(s)

    found_table = False
    while True:
        row = c.fetchone()
        if row is None:
            break
        found_table = True
        (name, type, nullable, charlen, numericprec, numericscale,
         default) = (
            row[columns.c.column_name],
            row[columns.c.data_type],
            row[columns.c.is_nullable] == 'YES',
            row[columns.c.character_maximum_length],
            row[columns.c.numeric_precision],
            row[columns.c.numeric_scale],
            row[columns.c.column_default])
        if include_columns and name not in include_columns:
            continue

        args = []
        for a in (charlen, numericprec, numericscale):
            if a is not None:
                args.append(a)
        coltype = self.ischema_names.get(type, None)
        if coltype == MSString and charlen == -1:
            coltype = MSText()
        else:
            if coltype is None:
                util.warn("Did not recognize type '%s' of column '%s'"
                          % (type, name))
                coltype = sqltypes.NULLTYPE
            elif coltype in (MSNVarchar, AdoMSNVarchar) and charlen == -1:
                args[0] = None
            coltype = coltype(*args)
        colargs = []
        if default is not None:
            colargs.append(schema.PassiveDefault(sql.text(default)))

        table.append_column(schema.Column(name, coltype, nullable=nullable,
                                          autoincrement=False, *colargs))

    if not found_table:
        raise exceptions.NoSuchTableError(table.name)

    # We also run an sp_columns to check for identity columns:
    cursor = connection.execute(
        "sp_columns @table_name = '%s', @table_owner = '%s'"
        % (table.name, current_schema))
    ic = None
    while True:
        row = cursor.fetchone()
        if row is None:
            break
        col_name, type_name = row[3], row[5]
        if type_name.endswith("identity"):
            ic = table.c[col_name]
            ic.autoincrement = True
            # setup a pseudo-sequence to represent the identity attribute -
            # we interpret this at table.create() time as the identity
            # attribute
            ic.sequence = schema.Sequence(ic.name + '_identity')
            # MSSQL: only one identity per table allowed
            cursor.close()
            break

    if ic is not None:
        try:
            cursor = connection.execute(
                "select ident_seed(?), ident_incr(?)",
                table.fullname, table.fullname)
            row = cursor.fetchone()
            cursor.close()
            if row is not None:
                ic.sequence.start = int(row[0])
                ic.sequence.increment = int(row[1])
        except:
            # ignoring it, works just like before
            pass

    # Add constraints
    RR = self.uppercase_table(ischema.ref_constraints)
    #   information_schema.referential_constraints
    TC = self.uppercase_table(ischema.constraints)
    #   information_schema.table_constraints
    C = self.uppercase_table(ischema.pg_key_constraints).alias('C')
    #   information_schema.constraint_column_usage: the constrained column
    R = self.uppercase_table(ischema.pg_key_constraints).alias('R')
    #   information_schema.constraint_column_usage: the referenced column

    # Primary key constraints
    s = sql.select([C.c.column_name, TC.c.constraint_type],
                   sql.and_(TC.c.constraint_name == C.c.constraint_name,
                            C.c.table_name == table.name))
    c = connection.execute(s)
    for row in c:
        if 'PRIMARY' in row[TC.c.constraint_type.name]:
            table.primary_key.add(table.c[row[0]])

    # Foreign key constraints
    s = sql.select([C.c.column_name,
                    R.c.table_schema, R.c.table_name, R.c.column_name,
                    RR.c.constraint_name, RR.c.match_option,
                    RR.c.update_rule, RR.c.delete_rule],
                   sql.and_(
                       C.c.table_name == table.name,
                       C.c.table_schema == (table.schema or current_schema),
                       C.c.constraint_name == RR.c.constraint_name,
                       R.c.constraint_name == RR.c.unique_constraint_name,
                       C.c.ordinal_position == R.c.ordinal_position),
                   order_by=[RR.c.constraint_name, R.c.ordinal_position])
    rows = connection.execute(s).fetchall()

    def _gen_fkref(table, rschema, rtbl, rcol):
        if table.schema and rschema != table.schema \
                or rschema != current_schema:
            return '.'.join([rschema, rtbl, rcol])
        else:
            return '.'.join([rtbl, rcol])

    # group rows by constraint ID, to handle multi-column FKs
    fknm, scols, rcols = (None, [], [])
    for r in rows:
        scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r
        if table.schema and rschema != table.schema \
                or rschema != current_schema:
            schema.Table(rtbl, table.metadata, schema=rschema,
                         autoload=True, autoload_with=connection)
        else:
            schema.Table(rtbl, table.metadata,
                         autoload=True, autoload_with=connection)
        if rfknm != fknm:
            if fknm:
                table.append_constraint(
                    schema.ForeignKeyConstraint(
                        scols,
                        [_gen_fkref(table, s, t, c) for s, t, c in rcols],
                        fknm))
            fknm, scols, rcols = (rfknm, [], [])
        if scol not in scols:
            scols.append(scol)
        if (rschema, rtbl, rcol) not in rcols:
            rcols.append((rschema, rtbl, rcol))

    if fknm and scols:
        table.append_constraint(
            schema.ForeignKeyConstraint(
                scols,
                [_gen_fkref(table, s, t, c) for s, t, c in rcols],
                fknm))
def reflecttable(self, connection, table):
    # TODO: map these better
    column_func = {
        14: lambda r: sqltypes.String(r['FLEN']),      # TEXT
        7: lambda r: sqltypes.Integer(),               # SHORT
        8: lambda r: sqltypes.Integer(),               # LONG
        9: lambda r: sqltypes.Float(),                 # QUAD
        10: lambda r: sqltypes.Float(),                # FLOAT
        27: lambda r: sqltypes.Float(),                # DOUBLE
        35: lambda r: sqltypes.DateTime(),             # TIMESTAMP
        37: lambda r: sqltypes.String(r['FLEN']),      # VARYING
        261: lambda r: sqltypes.TEXT(),                # BLOB
        40: lambda r: sqltypes.Char(r['FLEN']),        # CSTRING
        12: lambda r: sqltypes.Date(),                 # DATE
        13: lambda r: sqltypes.Time(),                 # TIME
        16: lambda r: sqltypes.Numeric(precision=r['FPREC'],
                                       length=r['FSCALE'] * -1)  # INT64
    }
    tblqry = """
        SELECT DISTINCT R.RDB$FIELD_NAME AS FNAME,
                        R.RDB$NULL_FLAG AS NULL_FLAG,
                        R.RDB$FIELD_POSITION,
                        F.RDB$FIELD_TYPE AS FTYPE,
                        F.RDB$FIELD_SUB_TYPE AS STYPE,
                        F.RDB$FIELD_LENGTH AS FLEN,
                        F.RDB$FIELD_PRECISION AS FPREC,
                        F.RDB$FIELD_SCALE AS FSCALE
        FROM RDB$RELATION_FIELDS R
             JOIN RDB$FIELDS F ON R.RDB$FIELD_SOURCE=F.RDB$FIELD_NAME
        WHERE F.RDB$SYSTEM_FLAG=0 and R.RDB$RELATION_NAME=?
        ORDER BY R.RDB$FIELD_POSITION"""
    keyqry = """
        SELECT SE.RDB$FIELD_NAME SENAME
        FROM RDB$RELATION_CONSTRAINTS RC
             JOIN RDB$INDEX_SEGMENTS SE
               ON RC.RDB$INDEX_NAME=SE.RDB$INDEX_NAME
        WHERE RC.RDB$CONSTRAINT_TYPE=? AND RC.RDB$RELATION_NAME=?"""
    fkqry = """
        SELECT RC.RDB$CONSTRAINT_NAME CNAME,
               CSE.RDB$FIELD_NAME FNAME,
               IX2.RDB$RELATION_NAME RNAME,
               SE.RDB$FIELD_NAME SENAME
        FROM RDB$RELATION_CONSTRAINTS RC
             JOIN RDB$INDICES IX1
               ON IX1.RDB$INDEX_NAME=RC.RDB$INDEX_NAME
             JOIN RDB$INDICES IX2
               ON IX2.RDB$INDEX_NAME=IX1.RDB$FOREIGN_KEY
             JOIN RDB$INDEX_SEGMENTS CSE
               ON CSE.RDB$INDEX_NAME=IX1.RDB$INDEX_NAME
             JOIN RDB$INDEX_SEGMENTS SE
               ON SE.RDB$INDEX_NAME=IX2.RDB$INDEX_NAME
              AND SE.RDB$FIELD_POSITION=CSE.RDB$FIELD_POSITION
        WHERE RC.RDB$CONSTRAINT_TYPE=? AND RC.RDB$RELATION_NAME=?
        ORDER BY SE.RDB$INDEX_NAME, SE.RDB$FIELD_POSITION"""

    # get primary key fields
    c = connection.execute(keyqry, ["PRIMARY KEY", table.name.upper()])
    pkfields = [r['SENAME'] for r in c.fetchall()]

    # get all of the fields for this table
    def lower_if_possible(name):
        # Remove trailing spaces: FB uses a CHAR() type,
        # which is padded with spaces
        name = name.rstrip()
        # If it's composed only of upper-case chars, use
        # the lowered version, otherwise keep the original
        # (even if stripped...)
        lname = name.lower()
        if lname.upper() == name and ' ' not in name:
            return lname
        return name

    c = connection.execute(tblqry, [table.name.upper()])
    row = c.fetchone()
    if not row:
        raise exceptions.NoSuchTableError(table.name)

    while row:
        name = row['FNAME']
        args = [lower_if_possible(name)]
        kw = {}
        # get the data types and lengths
        args.append(column_func[row['FTYPE']](row))
        # is it a primary key?
        kw['primary_key'] = name in pkfields
        table.append_column(schema.Column(*args, **kw))
        row = c.fetchone()

    # get the foreign keys
    c = connection.execute(fkqry, ["FOREIGN KEY", table.name.upper()])
    fks = {}
    while True:
        row = c.fetchone()
        if not row:
            break
        cname = lower_if_possible(row['CNAME'])
        try:
            fk = fks[cname]
        except KeyError:
            fks[cname] = fk = ([], [])
        rname = lower_if_possible(row['RNAME'])
        schema.Table(rname, table.metadata, autoload=True,
                     autoload_with=connection)
        fname = lower_if_possible(row['FNAME'])
        refspec = rname + '.' + lower_if_possible(row['SENAME'])
        fk[0].append(fname)
        fk[1].append(refspec)

    for name, value in fks.iteritems():
        table.append_constraint(
            schema.ForeignKeyConstraint(value[0], value[1], name=name))
    mac_key = xor(b64decode(secret_b64), dh_shared)
    return b64encode(mac_key), b64encode(btwoc(dh_public))


############################## Database tables and models ####################

from sqlalchemy import schema
from sqlalchemy import types
from sqlalchemy import orm
from sqlalchemy import and_

metadata = schema.MetaData()

openid_redirects_table = schema.Table(
    'openid_redirects', metadata,
    schema.Column('token', types.Unicode(255), primary_key=True),
    schema.Column('url', types.Text(), default=u''),
    schema.Column('site', types.Text(), default=u''),
    schema.Column('handle', types.Text(), default=u''))

openid_handles_table = schema.Table(
    'openid_handles', metadata,
    schema.Column('handler', types.Unicode(255), primary_key=True),
    schema.Column('secret', types.Text(), default=u''),
    schema.Column('assoc_type', types.Text(), default=u''),
    schema.Column('private', types.Boolean(), default=False))

openid_sites_table = schema.Table(
    'openid_sites', metadata,
    schema.Column('id', types.Integer,
                  schema.Sequence('openid_sites_seq_id', optional=True),
def reflecttable(self, connection, table, include_columns):
    # Query to extract the details of all the fields of the given table
    tblqry = """
    SELECT DISTINCT r.rdb$field_name AS fname,
                    r.rdb$null_flag AS null_flag,
                    t.rdb$type_name AS ftype,
                    f.rdb$field_sub_type AS stype,
                    f.rdb$field_length AS flen,
                    f.rdb$field_precision AS fprec,
                    f.rdb$field_scale AS fscale,
                    COALESCE(r.rdb$default_source,
                             f.rdb$default_source) AS fdefault
    FROM rdb$relation_fields r
         JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name
         JOIN rdb$types t
          ON t.rdb$type=f.rdb$field_type
             AND t.rdb$field_name='RDB$FIELD_TYPE'
    WHERE f.rdb$system_flag=0 AND r.rdb$relation_name=?
    ORDER BY r.rdb$field_position
    """
    # Query to extract the PK/FK constrained fields of the given table
    keyqry = """
    SELECT se.rdb$field_name AS fname
    FROM rdb$relation_constraints rc
         JOIN rdb$index_segments se ON rc.rdb$index_name=se.rdb$index_name
    WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
    """
    # Query to extract the details of each UK/FK of the given table
    fkqry = """
    SELECT rc.rdb$constraint_name AS cname,
           cse.rdb$field_name AS fname,
           ix2.rdb$relation_name AS targetrname,
           se.rdb$field_name AS targetfname
    FROM rdb$relation_constraints rc
         JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name
         JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key
         JOIN rdb$index_segments cse ON cse.rdb$index_name=ix1.rdb$index_name
         JOIN rdb$index_segments se
          ON se.rdb$index_name=ix2.rdb$index_name
             AND se.rdb$field_position=cse.rdb$field_position
    WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
    ORDER BY se.rdb$index_name, se.rdb$field_position
    """
    # Heuristic-query to determine the generator associated to a PK field
    genqry = """
    SELECT trigdep.rdb$depended_on_name AS fgenerator
    FROM rdb$dependencies tabdep
         JOIN rdb$dependencies trigdep
          ON (tabdep.rdb$dependent_name=trigdep.rdb$dependent_name
              AND trigdep.rdb$depended_on_type=14
              AND trigdep.rdb$dependent_type=2)
         JOIN rdb$triggers trig
          ON (trig.rdb$trigger_name=tabdep.rdb$dependent_name)
    WHERE tabdep.rdb$depended_on_name=?
      AND tabdep.rdb$depended_on_type=0
      AND trig.rdb$trigger_type=1
      AND tabdep.rdb$field_name=?
      AND (SELECT count(*)
           FROM rdb$dependencies trigdep2
           WHERE trigdep2.rdb$dependent_name
                 = trigdep.rdb$dependent_name) = 2
    """

    tablename = self._denormalize_name(table.name)

    # get primary key fields
    c = connection.execute(keyqry, ["PRIMARY KEY", tablename])
    pkfields = [self._normalize_name(r['fname']) for r in c.fetchall()]

    # get all of the fields for this table
    c = connection.execute(tblqry, [tablename])

    found_table = False
    while True:
        row = c.fetchone()
        if row is None:
            break
        found_table = True

        name = self._normalize_name(row['fname'])
        if include_columns and name not in include_columns:
            continue
        args = [name]

        kw = {}
        # get the data type
        coltype = ischema_names.get(row['ftype'].rstrip())
        if coltype is None:
            util.warn("Did not recognize type '%s' of column '%s'" %
                      (str(row['ftype']), name))
            coltype = sqltypes.NULLTYPE
        else:
            coltype = coltype(row)
        args.append(coltype)

        # is it a primary key?
        kw['primary_key'] = name in pkfields

        # is it nullable?
        kw['nullable'] = not bool(row['null_flag'])

        # does it have a default value?
        if row['fdefault'] is not None:
            # the value comes down as "DEFAULT 'value'"
            assert row['fdefault'].upper().startswith('DEFAULT '), row
            defvalue = row['fdefault'][8:]
            args.append(schema.DefaultClause(sql.text(defvalue)))

        col = schema.Column(*args, **kw)
        if kw['primary_key']:
            # if the PK is a single field, try to see if it's linked to
            # a sequence through a trigger
            if len(pkfields) == 1:
                genc = connection.execute(genqry, [tablename, row['fname']])
                genr = genc.fetchone()
                if genr is not None:
                    col.sequence = schema.Sequence(
                        self._normalize_name(genr['fgenerator']))

        table.append_column(col)

    if not found_table:
        raise exc.NoSuchTableError(table.name)

    # get the foreign keys
    c = connection.execute(fkqry, ["FOREIGN KEY", tablename])
    fks = {}
    while True:
        row = c.fetchone()
        if not row:
            break

        cname = self._normalize_name(row['cname'])
        try:
            fk = fks[cname]
        except KeyError:
            fks[cname] = fk = ([], [])
        rname = self._normalize_name(row['targetrname'])
        schema.Table(rname, table.metadata, autoload=True,
                     autoload_with=connection)
        fname = self._normalize_name(row['fname'])
        refspec = rname + '.' + self._normalize_name(row['targetfname'])
        fk[0].append(fname)
        fk[1].append(refspec)

    for name, value in fks.iteritems():
        table.append_constraint(
            schema.ForeignKeyConstraint(value[0], value[1], name=name,
                                        link_to_name=True))