def _locate_owner_row(self, owner, name, rows, raiseerr=False):
    """return the row in the given list of rows which references the
    given table name and owner name."""
    if not rows:
        if raiseerr:
            raise exceptions.NoSuchTableError(name)
        else:
            return None
    else:
        if owner is not None:
            for row in rows:
                if owner.upper() in row[0]:
                    return row
            else:
                if raiseerr:
                    raise exceptions.AssertionError(
                        "Specified owner %s does not own table %s" %
                        (owner, name))
                else:
                    return None
        else:
            if len(rows) == 1:
                return rows[0]
            else:
                if raiseerr:
                    raise exceptions.AssertionError(
                        "There are multiple tables with name '%s' visible to the schema, you must specify owner" % name)
                else:
                    return None
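# Illustrative sketch only (not part of the dialect code): a simplified,
# standalone version of the owner-disambiguation logic above, run against
# hypothetical (owner, table_name) rows.  Plain ValueError stands in for the
# sqlalchemy exceptions classes used in the real code.
def _locate_owner_row_sketch(owner, name, rows):
    if not rows:
        raise ValueError("no such table: %s" % name)
    if owner is not None:
        for row in rows:
            if owner.upper() in row[0]:
                return row
        raise ValueError("owner %s does not own table %s" % (owner, name))
    if len(rows) == 1:
        return rows[0]
    raise ValueError("multiple tables named '%s' are visible; specify an owner" % name)

# the same table name visible under two hypothetical owners
_rows = [('SCOTT', 'EMPLOYEES'), ('HR', 'EMPLOYEES')]
assert _locate_owner_row_sketch('scott', 'EMPLOYEES', _rows) == ('SCOTT', 'EMPLOYEES')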
def reflecttable(self, connection, table): # to use information_schema: #ischema.reflecttable(self, table, ischema_names, use_mysql=True) c = connection.execute("describe " + table.name, {}) found_table = False while True: row = c.fetchone() if row is None: break #print "row! " + repr(row) if not found_table: found_table = True (name, type, nullable, primary_key, default) = (row[0], row[1], row[2] == 'YES', row[3] == 'PRI', row[4]) match = re.match(r'(\w+)(\(.*?\))?\s*(\w+)?\s*(\w+)?', type) col_type = match.group(1) args = match.group(2) extra_1 = match.group(3) extra_2 = match.group(4) #print "coltype: " + repr(col_type) + " args: " + repr(args) + "extras:" + repr(extra_1) + ' ' + repr(extra_2) coltype = ischema_names.get(col_type, MSString) kw = {} if extra_1 is not None: kw[extra_1] = True if extra_2 is not None: kw[extra_2] = True if args is not None: if col_type == 'enum': args = args[1:-1] argslist = args.split(',') coltype = coltype(*argslist, **kw) else: argslist = re.findall(r'(\d+)', args) coltype = coltype(*[int(a) for a in argslist], **kw) table.append_item( schema.Column( name, coltype, **dict(primary_key=primary_key, nullable=nullable, default=default))) tabletype = self.moretableinfo(connection, table=table) table.kwargs['mysql_engine'] = tabletype if not found_table: raise exceptions.NoSuchTableError(table.name)
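# Illustrative sketch only (not part of the dialect code): how the regex used
# above decomposes the type string that MySQL's DESCRIBE returns into a base
# type, the parenthesised arguments, and up to two trailing keywords such as
# "unsigned zerofill".  The sample declarations below are hypothetical.
import re

_mysql_type_re = re.compile(r'(\w+)(\(.*?\))?\s*(\w+)?\s*(\w+)?')

for _decl in ("varchar(255)", "int(10) unsigned zerofill", "enum('a','b','c')"):
    _base, _args, _extra_1, _extra_2 = _mysql_type_re.match(_decl).groups()
    print("%-28s -> %s %s %s %s" % (_decl, _base, _args, _extra_1, _extra_2))
# varchar(255)                 -> varchar (255) None None
# int(10) unsigned zerofill    -> int (10) unsigned zerofill
# enum('a','b','c')            -> enum ('a','b','c') None None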
def reflecttable(self, connection, table, include_columns=None):
    """
    Inputs:
      - sqlalchemy.engine.base.Connection object has a <connection> reference
        to sqlalchemy.pool._ConnectionFairy which has a <connection> reference
        to sqlalchemy.databases.ibm_db_dbi.Connection, the actual DBAPI driver
        connection handler
      - table object
      - include_columns (a list or set) limits the autoload to the given
        column names.
    """
    dialect.logger.debug("\n *** IBM_DBDialect::reflecttable( " + str(table) + ', ' + str(include_columns) + ' )')
    ibm_dbi_conn = connection.connection.connection
    schema_name = self.get_default_schema_name(connection)
    if table.schema is not None:
        schema_name = table.schema

    # Append columns to table
    #columns = ibm_dbi_conn.columns( schema_name, table.name, include_columns)
    cu = ibm_dbi_conn.cursor()
    cu.execute("""select TABLE_NAME, COLUMN_NAME, ORDINAL_POSITION,
                         DATA_TYPE as TYPE_NAME, IS_NULLABLE
                  from SYSIBM.COLUMNS
                  where TABLE_SCHEMA = '%s' and TABLE_NAME = '%s'"""
               % (schema_name.upper(), table.name.upper()))
    columns = cu.fetchall()
    if not columns:
        raise exceptions.NoSuchTableError(table.name)

    for col in columns:
        (tab_name, col_name, col_id, col_type, is_nullable) = (
            col[0].lower(), col[1].lower(), col[2], col[3], col[4] == 'YES')
        col_args = []
        type = ischema_names.get(col_type, None)
        column = schema.Column(col_name, type, nullable=is_nullable, *col_args)
        dialect.logger.debug("\n *** column: " + repr(column))
        table.append_column(column)

    # Define table's primary keys
    # not of much use for now, so this is left unhandled for the time being
    """
def reflecttable(self, connection, table, include_columns): preparer = self.identifier_preparer if table.schema is None: pragma = "PRAGMA " else: pragma = "PRAGMA %s." % preparer.quote_identifier(table.schema) qtable = preparer.format_table(table, False) c = connection.execute("%stable_info(%s)" % (pragma, qtable)) found_table = False while True: row = c.fetchone() if row is None: break found_table = True (name, type_, nullable, has_default, primary_key) = (row[1], row[2].upper(), not row[3], row[4] is not None, row[5]) name = re.sub(r'^\"|\"$', '', name) if include_columns and name not in include_columns: continue match = re.match(r'(\w+)(\(.*?\))?', type_) if match: coltype = match.group(1) args = match.group(2) else: coltype = "VARCHAR" args = '' try: coltype = ischema_names[coltype] except KeyError: util.warn("Did not recognize type '%s' of column '%s'" % (coltype, name)) coltype = sqltypes.NullType if args is not None: args = re.findall(r'(\d+)', args) coltype = coltype(*[int(a) for a in args]) colargs = [] if has_default: colargs.append(PassiveDefault('?')) table.append_column( schema.Column(name, coltype, primary_key=primary_key, nullable=nullable, *colargs)) if not found_table: raise exceptions.NoSuchTableError(table.name) c = connection.execute("%sforeign_key_list(%s)" % (pragma, qtable)) fks = {} while True: row = c.fetchone() if row is None: break (constraint_name, tablename, localcol, remotecol) = (row[0], row[2], row[3], row[4]) tablename = re.sub(r'^\"|\"$', '', tablename) localcol = re.sub(r'^\"|\"$', '', localcol) remotecol = re.sub(r'^\"|\"$', '', remotecol) try: fk = fks[constraint_name] except KeyError: fk = ([], []) fks[constraint_name] = fk # look up the table based on the given table's engine, not 'self', # since it could be a ProxyEngine remotetable = schema.Table(tablename, table.metadata, autoload=True, autoload_with=connection) constrained_column = table.c[localcol].name refspec = ".".join([tablename, remotecol]) if constrained_column not in fk[0]: fk[0].append(constrained_column) if refspec not in fk[1]: fk[1].append(refspec) for name, value in fks.iteritems(): table.append_constraint( schema.ForeignKeyConstraint(value[0], value[1])) # check for UNIQUE indexes c = connection.execute("%sindex_list(%s)" % (pragma, qtable)) unique_indexes = [] while True: row = c.fetchone() if row is None: break if (row[2] == 1): unique_indexes.append(row[1]) # loop thru unique indexes for one that includes the primary key for idx in unique_indexes: c = connection.execute("%sindex_info(%s)" % (pragma, idx)) cols = [] while True: row = c.fetchone() if row is None: break cols.append(row[2])
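# Illustrative sketch only (not part of the dialect code): the positional row
# layout that the PRAGMA-based reflection above depends on, shown with the
# standard-library sqlite3 module, an in-memory database and a hypothetical
# table.  Each row comes back as (cid, name, type, notnull, dflt_value, pk),
# matching the row[1]..row[5] positions unpacked above.
import sqlite3

_conn = sqlite3.connect(":memory:")
_conn.execute("CREATE TABLE users ("
              "id INTEGER PRIMARY KEY, "
              "name VARCHAR(30) NOT NULL, "
              "bio TEXT DEFAULT 'n/a')")
for _row in _conn.execute("PRAGMA table_info(users)"):
    print(_row)
_conn.close()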
def reflecttable(self, connection, table, include_columns): #TODO: map these better column_func = { 14 : lambda r: sqltypes.String(r['FLEN']), # TEXT 7 : lambda r: sqltypes.Integer(), # SHORT 8 : lambda r: r['FPREC']==0 and sqltypes.Integer() or sqltypes.Numeric(precision=r['FPREC'], length=r['FSCALE'] * -1), #INT or NUMERIC 9 : lambda r: sqltypes.Float(), # QUAD 10 : lambda r: sqltypes.Float(), # FLOAT 27 : lambda r: sqltypes.Float(), # DOUBLE 35 : lambda r: sqltypes.DateTime(), # TIMESTAMP 37 : lambda r: sqltypes.String(r['FLEN']), # VARYING 261: lambda r: sqltypes.TEXT(), # BLOB 40 : lambda r: sqltypes.Char(r['FLEN']), # CSTRING 12 : lambda r: sqltypes.Date(), # DATE 13 : lambda r: sqltypes.Time(), # TIME 16 : lambda r: sqltypes.Numeric(precision=r['FPREC'], length=r['FSCALE'] * -1) #INT64 } tblqry = """ SELECT DISTINCT R.RDB$FIELD_NAME AS FNAME, R.RDB$NULL_FLAG AS NULL_FLAG, R.RDB$FIELD_POSITION, F.RDB$FIELD_TYPE AS FTYPE, F.RDB$FIELD_SUB_TYPE AS STYPE, F.RDB$FIELD_LENGTH AS FLEN, F.RDB$FIELD_PRECISION AS FPREC, F.RDB$FIELD_SCALE AS FSCALE FROM RDB$RELATION_FIELDS R JOIN RDB$FIELDS F ON R.RDB$FIELD_SOURCE=F.RDB$FIELD_NAME WHERE F.RDB$SYSTEM_FLAG=0 and R.RDB$RELATION_NAME=? ORDER BY R.RDB$FIELD_POSITION""" keyqry = """ SELECT SE.RDB$FIELD_NAME SENAME FROM RDB$RELATION_CONSTRAINTS RC JOIN RDB$INDEX_SEGMENTS SE ON RC.RDB$INDEX_NAME=SE.RDB$INDEX_NAME WHERE RC.RDB$CONSTRAINT_TYPE=? AND RC.RDB$RELATION_NAME=?""" fkqry = """ SELECT RC.RDB$CONSTRAINT_NAME CNAME, CSE.RDB$FIELD_NAME FNAME, IX2.RDB$RELATION_NAME RNAME, SE.RDB$FIELD_NAME SENAME FROM RDB$RELATION_CONSTRAINTS RC JOIN RDB$INDICES IX1 ON IX1.RDB$INDEX_NAME=RC.RDB$INDEX_NAME JOIN RDB$INDICES IX2 ON IX2.RDB$INDEX_NAME=IX1.RDB$FOREIGN_KEY JOIN RDB$INDEX_SEGMENTS CSE ON CSE.RDB$INDEX_NAME=IX1.RDB$INDEX_NAME JOIN RDB$INDEX_SEGMENTS SE ON SE.RDB$INDEX_NAME=IX2.RDB$INDEX_NAME AND SE.RDB$FIELD_POSITION=CSE.RDB$FIELD_POSITION WHERE RC.RDB$CONSTRAINT_TYPE=? AND RC.RDB$RELATION_NAME=? ORDER BY SE.RDB$INDEX_NAME, SE.RDB$FIELD_POSITION""" # get primary key fields c = connection.execute(keyqry, ["PRIMARY KEY", self._denormalize_name(table.name)]) pkfields =[self._normalize_name(r['SENAME']) for r in c.fetchall()] # get all of the fields for this table c = connection.execute(tblqry, [self._denormalize_name(table.name)]) found_table = False while True: row = c.fetchone() if row is None: break found_table = True name = self._normalize_name(row['FNAME']) if include_columns and name not in include_columns: continue args = [name] kw = {} # get the data types and lengths coltype = column_func.get(row['FTYPE'], None) if coltype is None: warnings.warn(RuntimeWarning("Did not recognize type '%s' of column '%s'" % (str(row['FTYPE']), name))) coltype = sqltypes.NULLTYPE else: coltype = coltype(row) args.append(coltype) # is it a primary key? kw['primary_key'] = name in pkfields # is it nullable ? kw['nullable'] = not bool(row['NULL_FLAG']) table.append_column(schema.Column(*args, **kw)) if not found_table: raise exceptions.NoSuchTableError(table.name) # get the foreign keys c = connection.execute(fkqry, ["FOREIGN KEY", self._denormalize_name(table.name)]) fks = {} while True: row = c.fetchone() if not row: break cname = self._normalize_name(row['CNAME']) try: fk = fks[cname] except KeyError: fks[cname] = fk = ([], []) rname = self._normalize_name(row['RNAME']) schema.Table(rname, table.metadata, autoload=True, autoload_with=connection) fname = self._normalize_name(row['FNAME']) refspec = rname + '.' 
+ self._normalize_name(row['SENAME']) fk[0].append(fname) fk[1].append(refspec) for name,value in fks.iteritems(): table.append_constraint(schema.ForeignKeyConstraint(value[0], value[1], name=name))
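# Illustrative sketch only (not part of the dialect code): the point of the
# FSCALE * -1 above is that Interbase/Firebird stores NUMERIC/DECIMAL scale as
# a non-positive value in RDB$FIELD_SCALE.  A plain tuple stands in for the
# sqlalchemy Numeric type; the sample row is hypothetical.
def _fb_numeric_sketch(row):
    return ('NUMERIC', row['FPREC'], row['FSCALE'] * -1)

_sample_row = {'FPREC': 10, 'FSCALE': -2}    # hypothetical NUMERIC(10,2) column
assert _fb_numeric_sketch(_sample_row) == ('NUMERIC', 10, 2)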
def reflecttable(self, connection, table): c = connection.execute("PRAGMA table_info(" + table.name + ")", {}) found_table = False while True: row = c.fetchone() if row is None: break #print "row! " + repr(row) found_table = True (name, type, nullable, has_default, primary_key) = (row[1], row[2].upper(), not row[3], row[4] is not None, row[5]) name = re.sub(r'^\"|\"$', '', name) match = re.match(r'(\w+)(\(.*?\))?', type) if match: coltype = match.group(1) args = match.group(2) else: coltype = "VARCHAR" args = '' #print "coltype: " + repr(coltype) + " args: " + repr(args) coltype = pragma_names.get(coltype, SLString) if args is not None: args = re.findall(r'(\d+)', args) #print "args! " +repr(args) coltype = coltype(*[int(a) for a in args]) colargs = [] if has_default: colargs.append(PassiveDefault('?')) table.append_column( schema.Column(name, coltype, primary_key=primary_key, nullable=nullable, *colargs)) if not found_table: raise exceptions.NoSuchTableError(table.name) c = connection.execute("PRAGMA foreign_key_list(" + table.name + ")", {}) fks = {} while True: row = c.fetchone() if row is None: break (constraint_name, tablename, localcol, remotecol) = (row[0], row[2], row[3], row[4]) tablename = re.sub(r'^\"|\"$', '', tablename) localcol = re.sub(r'^\"|\"$', '', localcol) remotecol = re.sub(r'^\"|\"$', '', remotecol) try: fk = fks[constraint_name] except KeyError: fk = ([], []) fks[constraint_name] = fk #print "row! " + repr([key for key in row.keys()]), repr(row) # look up the table based on the given table's engine, not 'self', # since it could be a ProxyEngine remotetable = schema.Table(tablename, table.metadata, autoload=True, autoload_with=connection) constrained_column = table.c[localcol].name refspec = ".".join([tablename, remotecol]) if constrained_column not in fk[0]: fk[0].append(constrained_column) if refspec not in fk[1]: fk[1].append(refspec) for name, value in fks.iteritems(): table.append_constraint( schema.ForeignKeyConstraint(value[0], value[1])) # check for UNIQUE indexes c = connection.execute("PRAGMA index_list(" + table.name + ")", {}) unique_indexes = [] while True: row = c.fetchone() if row is None: break if (row[2] == 1): unique_indexes.append(row[1]) # loop thru unique indexes for one that includes the primary key for idx in unique_indexes: c = connection.execute("PRAGMA index_info(" + idx + ")", {}) cols = [] while True: row = c.fetchone() if row is None: break cols.append(row[2]) col = table.columns[row[2]] # unique index that includes the pk is considered a multiple primary key for col in cols: table.primary_key.add(table.columns[col])
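# Illustrative sketch only (not part of the dialect code): the positional
# layout of PRAGMA foreign_key_list rows, which the loop above unpacks as
# row[0], row[2], row[3], row[4].  Standard-library sqlite3, in-memory
# database, hypothetical tables.
import sqlite3

_conn = sqlite3.connect(":memory:")
_conn.execute("CREATE TABLE parent (id INTEGER PRIMARY KEY)")
_conn.execute("CREATE TABLE child ("
              "id INTEGER PRIMARY KEY, "
              "parent_id INTEGER REFERENCES parent(id))")
for _row in _conn.execute("PRAGMA foreign_key_list(child)"):
    # (id, seq, table, from, to, on_update, on_delete, match)
    print(_row)
_conn.close()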
def reflecttable(self, connection, table, include_columns): import sqlalchemy.databases.information_schema as ischema # Get base columns if table.schema is not None: current_schema = table.schema else: current_schema = self.get_default_schema_name(connection) columns = self.uppercase_table(ischema.columns) s = sql.select([columns], current_schema and sql.and_(columns.c.table_name==table.name, columns.c.table_schema==current_schema) or columns.c.table_name==table.name, order_by=[columns.c.ordinal_position]) c = connection.execute(s) found_table = False while True: row = c.fetchone() if row is None: break found_table = True (name, type, nullable, charlen, numericprec, numericscale, default) = ( row[columns.c.column_name], row[columns.c.data_type], row[columns.c.is_nullable] == 'YES', row[columns.c.character_maximum_length], row[columns.c.numeric_precision], row[columns.c.numeric_scale], row[columns.c.column_default] ) if include_columns and name not in include_columns: continue args = [] for a in (charlen, numericprec, numericscale): if a is not None: args.append(a) coltype = self.ischema_names.get(type, None) if coltype == MSString and charlen == -1: coltype = MSText() else: if coltype is None: util.warn("Did not recognize type '%s' of column '%s'" % (type, name)) coltype = sqltypes.NULLTYPE elif coltype in (MSNVarchar, AdoMSNVarchar) and charlen == -1: args[0] = None coltype = coltype(*args) colargs= [] if default is not None: colargs.append(schema.PassiveDefault(sql.text(default))) table.append_column(schema.Column(name, coltype, nullable=nullable, autoincrement=False, *colargs)) if not found_table: raise exceptions.NoSuchTableError(table.name) # We also run an sp_columns to check for identity columns: cursor = connection.execute("sp_columns @table_name = '%s', @table_owner = '%s'" % (table.name, current_schema)) ic = None while True: row = cursor.fetchone() if row is None: break col_name, type_name = row[3], row[5] if type_name.endswith("identity"): ic = table.c[col_name] ic.autoincrement = True # setup a psuedo-sequence to represent the identity attribute - we interpret this at table.create() time as the identity attribute ic.sequence = schema.Sequence(ic.name + '_identity') # MSSQL: only one identity per table allowed cursor.close() break if not ic is None: try: cursor = connection.execute("select ident_seed(?), ident_incr(?)", table.fullname, table.fullname) row = cursor.fetchone() cursor.close() if not row is None: ic.sequence.start=int(row[0]) ic.sequence.increment=int(row[1]) except: # ignoring it, works just like before pass # Add constraints RR = self.uppercase_table(ischema.ref_constraints) #information_schema.referential_constraints TC = self.uppercase_table(ischema.constraints) #information_schema.table_constraints C = self.uppercase_table(ischema.pg_key_constraints).alias('C') #information_schema.constraint_column_usage: the constrained column R = self.uppercase_table(ischema.pg_key_constraints).alias('R') #information_schema.constraint_column_usage: the referenced column # Primary key constraints s = sql.select([C.c.column_name, TC.c.constraint_type], sql.and_(TC.c.constraint_name == C.c.constraint_name, C.c.table_name == table.name)) c = connection.execute(s) for row in c: if 'PRIMARY' in row[TC.c.constraint_type.name]: table.primary_key.add(table.c[row[0]]) # Foreign key constraints s = sql.select([C.c.column_name, R.c.table_schema, R.c.table_name, R.c.column_name, RR.c.constraint_name, RR.c.match_option, RR.c.update_rule, RR.c.delete_rule], sql.and_(C.c.table_name == 
table.name, C.c.table_schema == (table.schema or current_schema), C.c.constraint_name == RR.c.constraint_name, R.c.constraint_name == RR.c.unique_constraint_name, C.c.ordinal_position == R.c.ordinal_position ), order_by = [RR.c.constraint_name, R.c.ordinal_position]) rows = connection.execute(s).fetchall() def _gen_fkref(table, rschema, rtbl, rcol): if table.schema and rschema != table.schema or rschema != current_schema: return '.'.join([rschema, rtbl, rcol]) else: return '.'.join([rtbl, rcol]) # group rows by constraint ID, to handle multi-column FKs fknm, scols, rcols = (None, [], []) for r in rows: scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r if table.schema and rschema != table.schema or rschema != current_schema: schema.Table(rtbl, table.metadata, schema=rschema, autoload=True, autoload_with=connection) else: schema.Table(rtbl, table.metadata, autoload=True, autoload_with=connection) if rfknm != fknm: if fknm: table.append_constraint(schema.ForeignKeyConstraint(scols, [_gen_fkref(table,s,t,c) for s,t,c in rcols], fknm)) fknm, scols, rcols = (rfknm, [], []) if (not scol in scols): scols.append(scol) if (not (rschema, rtbl, rcol) in rcols): rcols.append((rschema, rtbl, rcol)) if fknm and scols: table.append_constraint(schema.ForeignKeyConstraint(scols, [_gen_fkref(table,s,t,c) for s,t,c in rcols], fknm))
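# Illustrative sketch only (not part of the dialect code): the grouping
# pattern used above to fold per-column foreign key rows (ordered by
# constraint name and ordinal position) into one constrained-columns /
# referenced-columns pair per constraint.  The rows below are hypothetical.
_rows = [
    # (constrained_column, referred_schema, referred_table, referred_column, constraint_name)
    ('order_id',   'dbo', 'orders', 'id',   'FK_lines_orders'),
    ('order_line', 'dbo', 'orders', 'line', 'FK_lines_orders'),
    ('user_id',    'dbo', 'users',  'id',   'FK_lines_users'),
]
_fks = {}
for _scol, _rschema, _rtbl, _rcol, _fkname in _rows:
    _scols, _rcols = _fks.setdefault(_fkname, ([], []))
    _scols.append(_scol)
    _rcols.append('%s.%s.%s' % (_rschema, _rtbl, _rcol))
for _name in sorted(_fks):
    print("%s: %s -> %s" % (_name, _fks[_name][0], _fks[_name][1]))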
def reflecttable(self, connection, table, include_columns): # Get base columns if table.schema is not None: current_schema = table.schema else: current_schema = self.get_default_schema_name(connection) s = sql.select([columns, domains], tables.c.table_name == table.name, from_obj=[columns.join(tables).join(domains)], order_by=[columns.c.column_id]) c = connection.execute(s) found_table = False # makes sure we append the columns in the correct order while True: row = c.fetchone() if row is None: break found_table = True (name, type, nullable, charlen, numericprec, numericscale, default, primary_key, max_identity, table_id, column_id) = ( row[columns.c.column_name], row[domains.c.domain_name], row[columns.c.nulls] == 'Y', row[columns.c.width], row[domains.c.precision], row[columns.c.scale], row[columns.c.default], row[columns.c.pkey] == 'Y', row[columns.c.max_identity], row[tables.c.table_id], row[columns.c.column_id], ) if include_columns and name not in include_columns: continue # FIXME: else problems with SybaseBinary(size) if numericscale == 0: numericscale = None args = [] for a in (charlen, numericprec, numericscale): if a is not None: args.append(a) coltype = self.ischema_names.get(type, None) if coltype == SybaseString and charlen == -1: coltype = SybaseText() else: if coltype is None: util.warn("Did not recognize type '%s' of column '%s'" % (type, name)) coltype = sqltypes.NULLTYPE coltype = coltype(*args) colargs = [] if default is not None: colargs.append(schema.PassiveDefault(sql.text(default))) # any sequences ? col = schema.Column(name, coltype, nullable=nullable, primary_key=primary_key, *colargs) if int(max_identity) > 0: col.sequence = schema.Sequence(name + '_identity') col.sequence.start = int(max_identity) col.sequence.increment = 1 # append the column table.append_column(col) # any foreign key constraint for this table ? # note: no multi-column foreign keys are considered s = "select st1.table_name, sc1.column_name, st2.table_name, sc2.column_name from systable as st1 join sysfkcol on st1.table_id=sysfkcol.foreign_table_id join sysforeignkey join systable as st2 on sysforeignkey.primary_table_id = st2.table_id join syscolumn as sc1 on sysfkcol.foreign_column_id=sc1.column_id and sc1.table_id=st1.table_id join syscolumn as sc2 on sysfkcol.primary_column_id=sc2.column_id and sc2.table_id=st2.table_id where st1.table_name='%(table_name)s';" % { 'table_name': table.name } c = connection.execute(s) foreignKeys = {} while True: row = c.fetchone() if row is None: break (foreign_table, foreign_column, primary_table, primary_column) = ( row[0], row[1], row[2], row[3], ) if not primary_table in foreignKeys.keys(): foreignKeys[primary_table] = [[ '%s' % (foreign_column) ], ['%s.%s' % (primary_table, primary_column)]] else: foreignKeys[primary_table][0].append('%s' % (foreign_column)) foreignKeys[primary_table][1].append( '%s.%s' % (primary_table, primary_column)) for primary_table in foreignKeys.keys(): #table.append_constraint(schema.ForeignKeyConstraint(['%s.%s'%(foreign_table, foreign_column)], ['%s.%s'%(primary_table,primary_column)])) table.append_constraint( schema.ForeignKeyConstraint(foreignKeys[primary_table][0], foreignKeys[primary_table][1])) if not found_table: raise exceptions.NoSuchTableError(table.name)
def reflecttable(self, connection, table, include_columns): preparer = self.identifier_preparer if table.schema is not None: schema_where_clause = "n.nspname = :schema" schemaname = table.schema if isinstance(schemaname, str): schemaname = schemaname.decode(self.encoding) else: schema_where_clause = "pg_catalog.pg_table_is_visible(c.oid)" schemaname = None SQL_COLS = """ SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod), (SELECT substring(d.adsrc for 128) FROM pg_catalog.pg_attrdef d WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum AND a.atthasdef) AS DEFAULT, a.attnotnull, a.attnum, a.attrelid as table_oid FROM pg_catalog.pg_attribute a WHERE a.attrelid = ( SELECT c.oid FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE (%s) AND c.relname = :table_name AND c.relkind in ('r','v') ) AND a.attnum > 0 AND NOT a.attisdropped ORDER BY a.attnum """ % schema_where_clause s = sql.text(SQL_COLS, bindparams=[ sql.bindparam('table_name', type_=sqltypes.Unicode), sql.bindparam('schema', type_=sqltypes.Unicode) ], typemap={ 'attname': sqltypes.Unicode, 'default': sqltypes.Unicode }) tablename = table.name if isinstance(tablename, str): tablename = tablename.decode(self.encoding) c = connection.execute(s, table_name=tablename, schema=schemaname) rows = c.fetchall() if not rows: raise exceptions.NoSuchTableError(table.name) domains = self._load_domains(connection) for name, format_type, default, notnull, attnum, table_oid in rows: if include_columns and name not in include_columns: continue ## strip (30) from character varying(30) attype = re.search('([^\([]+)', format_type).group(1) nullable = not notnull is_array = format_type.endswith('[]') try: charlen = re.search('\(([\d,]+)\)', format_type).group(1) except: charlen = False numericprec = False numericscale = False if attype == 'numeric': if charlen is False: numericprec, numericscale = (None, None) else: numericprec, numericscale = charlen.split(',') charlen = False if attype == 'double precision': numericprec, numericscale = (53, False) charlen = False if attype == 'integer': numericprec, numericscale = (32, 0) charlen = False args = [] for a in (charlen, numericprec, numericscale): if a is None: args.append(None) elif a is not False: args.append(int(a)) kwargs = {} if attype == 'timestamp with time zone': kwargs['timezone'] = True elif attype == 'timestamp without time zone': kwargs['timezone'] = False if attype in ischema_names: coltype = ischema_names[attype] else: if attype in domains: domain = domains[attype] if domain['attype'] in ischema_names: # A table can't override whether the domain is nullable. nullable = domain['nullable'] if domain['default'] and not default: # It can, however, override the default value, but can't set it to null. default = domain['default'] coltype = ischema_names[domain['attype']] else: coltype = None if coltype: coltype = coltype(*args, **kwargs) if is_array: coltype = PGArray(coltype) else: util.warn("Did not recognize type '%s' of column '%s'" % (attype, name)) coltype = sqltypes.NULLTYPE colargs = [] if default is not None: match = re.search(r"""(nextval\(')([^']+)('.*$)""", default) if match is not None: # the default is related to a Sequence sch = table.schema if '.' not in match.group(2) and sch is not None: # unconditionally quote the schema name. this could # later be enhanced to obey quoting rules / "quote schema" default = match.group(1) + ( '"%s"' % sch) + '.' 
+ match.group(2) + match.group(3) colargs.append(schema.PassiveDefault(sql.text(default))) table.append_column( schema.Column(name, coltype, nullable=nullable, *colargs)) # Primary keys PK_SQL = """ SELECT attname FROM pg_attribute WHERE attrelid = ( SELECT indexrelid FROM pg_index i WHERE i.indrelid = :table AND i.indisprimary = 't') ORDER BY attnum """ t = sql.text(PK_SQL, typemap={'attname': sqltypes.Unicode}) c = connection.execute(t, table=table_oid) for row in c.fetchall(): pk = row[0] col = table.c[pk] table.primary_key.add(col) if col.default is None: col.autoincrement = False # Foreign keys FK_SQL = """ SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_catalog.pg_constraint r WHERE r.conrelid = :table AND r.contype = 'f' ORDER BY 1 """ t = sql.text(FK_SQL, typemap={ 'conname': sqltypes.Unicode, 'condef': sqltypes.Unicode }) c = connection.execute(t, table=table_oid) for conname, condef in c.fetchall(): m = re.search( 'FOREIGN KEY \((.*?)\) REFERENCES (?:(.*?)\.)?(.*?)\((.*?)\)', condef).groups() (constrained_columns, referred_schema, referred_table, referred_columns) = m constrained_columns = [ preparer._unquote_identifier(x) for x in re.split(r'\s*,\s*', constrained_columns) ] if referred_schema: referred_schema = preparer._unquote_identifier(referred_schema) elif table.schema is not None and table.schema == self.get_default_schema_name( connection): # no schema (i.e. its the default schema), and the table we're # reflecting has the default schema explicit, then use that. # i.e. try to use the user's conventions referred_schema = table.schema referred_table = preparer._unquote_identifier(referred_table) referred_columns = [ preparer._unquote_identifier(x) for x in re.split(r'\s*,\s', referred_columns) ] refspec = [] if referred_schema is not None: schema.Table(referred_table, table.metadata, autoload=True, schema=referred_schema, autoload_with=connection) for column in referred_columns: refspec.append(".".join( [referred_schema, referred_table, column])) else: schema.Table(referred_table, table.metadata, autoload=True, autoload_with=connection) for column in referred_columns: refspec.append(".".join([referred_table, column])) table.append_constraint( schema.ForeignKeyConstraint(constrained_columns, refspec, conname))
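# Illustrative sketch only (not part of the dialect code): the two regular
# expressions the PostgreSQL reflection above applies, one to split the
# pg_catalog.format_type() output into a base type plus arguments, and one to
# recognize nextval('...') defaults that point at a sequence.  The sample
# strings are hypothetical.
import re

for _fmt in ("character varying(30)", "numeric(10,2)",
             "timestamp with time zone", "integer[]"):
    _attype = re.search(r'([^\([]+)', _fmt).group(1)
    _m = re.search(r'\(([\d,]+)\)', _fmt)
    _args = _m.group(1) if _m else False
    print("%-28s -> type=%r args=%r array=%r"
          % (_fmt, _attype, _args, _fmt.endswith('[]')))

_default = "nextval('public.users_id_seq'::regclass)"
_m = re.search(r"""(nextval\(')([^']+)('.*$)""", _default)
print(_m.group(2))    # public.users_id_seq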
def reflecttable(self, connection, table, include_columns): # Query to extract the details of all the fields of the given table tblqry = """ SELECT DISTINCT r.rdb$field_name AS fname, r.rdb$null_flag AS null_flag, t.rdb$type_name AS ftype, f.rdb$field_sub_type AS stype, f.rdb$field_length AS flen, f.rdb$field_precision AS fprec, f.rdb$field_scale AS fscale, COALESCE(r.rdb$default_source, f.rdb$default_source) AS fdefault FROM rdb$relation_fields r JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name JOIN rdb$types t ON t.rdb$type=f.rdb$field_type AND t.rdb$field_name='RDB$FIELD_TYPE' WHERE f.rdb$system_flag=0 AND r.rdb$relation_name=? ORDER BY r.rdb$field_position """ # Query to extract the PK/FK constrained fields of the given table keyqry = """ SELECT se.rdb$field_name AS fname FROM rdb$relation_constraints rc JOIN rdb$index_segments se ON rc.rdb$index_name=se.rdb$index_name WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=? """ # Query to extract the details of each UK/FK of the given table fkqry = """ SELECT rc.rdb$constraint_name AS cname, cse.rdb$field_name AS fname, ix2.rdb$relation_name AS targetrname, se.rdb$field_name AS targetfname FROM rdb$relation_constraints rc JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key JOIN rdb$index_segments cse ON cse.rdb$index_name=ix1.rdb$index_name JOIN rdb$index_segments se ON se.rdb$index_name=ix2.rdb$index_name AND se.rdb$field_position=cse.rdb$field_position WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=? ORDER BY se.rdb$index_name, se.rdb$field_position """ # Heuristic-query to determine the generator associated to a PK field genqry = """ SELECT trigdep.rdb$depended_on_name AS fgenerator FROM rdb$dependencies tabdep JOIN rdb$dependencies trigdep ON (tabdep.rdb$dependent_name=trigdep.rdb$dependent_name AND trigdep.rdb$depended_on_type=14 AND trigdep.rdb$dependent_type=2) JOIN rdb$triggers trig ON (trig.rdb$trigger_name=tabdep.rdb$dependent_name) WHERE tabdep.rdb$depended_on_name=? AND tabdep.rdb$depended_on_type=0 AND trig.rdb$trigger_type=1 AND tabdep.rdb$field_name=? AND (SELECT count(*) FROM rdb$dependencies trigdep2 WHERE trigdep2.rdb$dependent_name = trigdep.rdb$dependent_name) = 2 """ tablename = self._denormalize_name(table.name) # get primary key fields c = connection.execute(keyqry, ["PRIMARY KEY", tablename]) pkfields = [self._normalize_name(r['fname']) for r in c.fetchall()] # get all of the fields for this table c = connection.execute(tblqry, [tablename]) found_table = False while True: row = c.fetchone() if row is None: break found_table = True name = self._normalize_name(row['fname']) if include_columns and name not in include_columns: continue args = [name] kw = {} # get the data type coltype = ischema_names.get(row['ftype'].rstrip()) if coltype is None: util.warn("Did not recognize type '%s' of column '%s'" % (str(row['ftype']), name)) coltype = sqltypes.NULLTYPE else: coltype = coltype(row) args.append(coltype) # is it a primary key? kw['primary_key'] = name in pkfields # is it nullable? kw['nullable'] = not bool(row['null_flag']) # does it have a default value? 
if row['fdefault'] is not None: # the value comes down as "DEFAULT 'value'" assert row['fdefault'].startswith('DEFAULT ') defvalue = row['fdefault'][8:] args.append(schema.PassiveDefault(sql.text(defvalue))) col = schema.Column(*args, **kw) if kw['primary_key']: # if the PK is a single field, try to see if its linked to # a sequence thru a trigger if len(pkfields) == 1: genc = connection.execute(genqry, [tablename, row['fname']]) genr = genc.fetchone() if genr is not None: col.sequence = schema.Sequence( self._normalize_name(genr['fgenerator'])) table.append_column(col) if not found_table: raise exceptions.NoSuchTableError(table.name) # get the foreign keys c = connection.execute(fkqry, ["FOREIGN KEY", tablename]) fks = {} while True: row = c.fetchone() if not row: break cname = self._normalize_name(row['cname']) try: fk = fks[cname] except KeyError: fks[cname] = fk = ([], []) rname = self._normalize_name(row['targetrname']) schema.Table(rname, table.metadata, autoload=True, autoload_with=connection) fname = self._normalize_name(row['fname']) refspec = rname + '.' + self._normalize_name(row['targetfname']) fk[0].append(fname) fk[1].append(refspec) for name, value in fks.iteritems(): table.append_constraint( schema.ForeignKeyConstraint(value[0], value[1], name=name))
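# Illustrative sketch only (not part of the dialect code): Firebird returns
# the column default as the literal source text "DEFAULT <value>", which is
# why the code above asserts the prefix and slices it off at position 8.
# The sample value is hypothetical.
_fdefault = "DEFAULT 'unknown'"
assert _fdefault.startswith('DEFAULT ')
print(_fdefault[8:])    # 'unknown'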
def reflecttable(self, connection, table): import sqlalchemy.databases.information_schema as ischema # Get base columns if table.schema is not None: current_schema = table.schema else: current_schema = self.get_default_schema_name() columns = ischema.columns s = sql.select([columns], current_schema and sql.and_(columns.c.table_name == table.name, columns.c.table_schema == current_schema) or columns.c.table_name == table.name, order_by=[columns.c.ordinal_position]) c = connection.execute(s) found_table = False while True: row = c.fetchone() if row is None: break found_table = True (name, type, nullable, charlen, numericprec, numericscale, default) = (row[columns.c.column_name], row[columns.c.data_type], row[columns.c.is_nullable] == 'YES', row[columns.c.character_maximum_length], row[columns.c.numeric_precision], row[columns.c.numeric_scale], row[columns.c.column_default]) args = [] for a in (charlen, numericprec, numericscale): if a is not None: args.append(a) coltype = ischema_names[type] coltype = coltype(*args) colargs = [] if default is not None: colargs.append(schema.PassiveDefault(sql.text(default))) table.append_item( schema.Column(name, coltype, nullable=nullable, *colargs)) if not found_table: raise exceptions.NoSuchTableError(table.name) # We also run an sp_columns to check for identity columns: # FIXME: note that this only fetches the existence of an identity column, not it's properties like (seed, increment) # also, add a check to make sure we specify the schema name of the table # cursor = table.engine.execute("sp_columns " + table.name, {}) cursor = connection.execute("sp_columns " + table.name) while True: row = cursor.fetchone() if row is None: break col_name, type_name = row[3], row[5] if type_name.endswith("identity"): ic = table.c[col_name] ic.primary_key = True # setup a psuedo-sequence to represent the identity attribute - we interpret this at table.create() time as the identity attribute ic.sequence = schema.Sequence(ic.name + '_identity') # Add constraints RR = ischema.ref_constraints #information_schema.referential_constraints TC = ischema.constraints #information_schema.table_constraints C = ischema.column_constraints.alias( 'C' ) #information_schema.constraint_column_usage: the constrained column R = ischema.column_constraints.alias( 'R' ) #information_schema.constraint_column_usage: the referenced column fromjoin = TC.join(RR, RR.c.constraint_name == TC.c.constraint_name).join( C, C.c.constraint_name == RR.c.constraint_name) fromjoin = fromjoin.join( R, R.c.constraint_name == RR.c.unique_constraint_name) s = sql.select([ TC.c.constraint_type, C.c.table_schema, C.c.table_name, C.c.column_name, R.c.table_schema, R.c.table_name, R.c.column_name ], sql.and_(RR.c.constraint_schema == current_schema, C.c.table_name == table.name), from_obj=[fromjoin], use_labels=True) colmap = [ TC.c.constraint_type, C.c.column_name, R.c.table_schema, R.c.table_name, R.c.column_name ] c = connection.execute(s) while True: row = c.fetchone() if row is None: break print "CCROW", row.keys(), row (type, constrained_column, referred_schema, referred_table, referred_column) = (row[colmap[0]], row[colmap[1]], row[colmap[2]], row[colmap[3]], row[colmap[4]]) if type == 'PRIMARY KEY': table.c[constrained_column]._set_primary_key() elif type == 'FOREIGN KEY': if current_schema == referred_schema: referred_schema = table.schema remotetable = schema.Table(referred_table, table.metadata, autoload=True, autoload_with=connection, schema=referred_schema) table.c[constrained_column].append_item( 
schema.ForeignKey(remotetable.c[referred_column]))
def reflecttable(self, connection, table, include_columns): denormalize = self.identifier_preparer._denormalize_name normalize = self.identifier_preparer._normalize_name st = ('SELECT COLUMNNAME, MODE, DATATYPE, CODETYPE, LEN, DEC, ' ' NULLABLE, "DEFAULT", DEFAULTFUNCTION ' 'FROM COLUMNS ' 'WHERE TABLENAME=? AND SCHEMANAME=%s ' 'ORDER BY POS') fk = ('SELECT COLUMNNAME, FKEYNAME, ' ' REFSCHEMANAME, REFTABLENAME, REFCOLUMNNAME, RULE, ' ' (CASE WHEN REFSCHEMANAME = CURRENT_SCHEMA ' ' THEN 1 ELSE 0 END) AS in_schema ' 'FROM FOREIGNKEYCOLUMNS ' 'WHERE TABLENAME=? AND SCHEMANAME=%s ' 'ORDER BY FKEYNAME ') params = [denormalize(table.name)] if not table.schema: st = st % 'CURRENT_SCHEMA' fk = fk % 'CURRENT_SCHEMA' else: st = st % '?' fk = fk % '?' params.append(denormalize(table.schema)) rows = connection.execute(st, params).fetchall() if not rows: raise exceptions.NoSuchTableError(table.fullname) include_columns = util.Set(include_columns or []) for row in rows: (name, mode, col_type, encoding, length, scale, nullable, constant_def, func_def) = row name = normalize(name) if include_columns and name not in include_columns: continue type_args, type_kw = [], {} if col_type == 'FIXED': type_args = length, scale # Convert FIXED(10) DEFAULT SERIAL to our Integer if (scale == 0 and func_def is not None and func_def.startswith('SERIAL')): col_type = 'INTEGER' type_args = length, elif col_type in 'FLOAT': type_args = length, elif col_type in ('CHAR', 'VARCHAR'): type_args = length, type_kw['encoding'] = encoding elif col_type == 'LONG': type_kw['encoding'] = encoding try: type_cls = ischema_names[col_type.lower()] type_instance = type_cls(*type_args, **type_kw) except KeyError: util.warn("Did not recognize type '%s' of column '%s'" % (col_type, name)) type_instance = sqltypes.NullType col_kw = {'autoincrement': False} col_kw['nullable'] = (nullable == 'YES') col_kw['primary_key'] = (mode == 'KEY') if func_def is not None: if func_def.startswith('SERIAL'): if col_kw['primary_key']: # No special default- let the standard autoincrement # support handle SERIAL pk columns. 
col_kw['autoincrement'] = True else: # strip current numbering col_kw['default'] = schema.PassiveDefault( sql.text('SERIAL')) col_kw['autoincrement'] = True else: col_kw['default'] = schema.PassiveDefault( sql.text(func_def)) elif constant_def is not None: col_kw['default'] = schema.PassiveDefault( sql.text("'%s'" % constant_def.replace("'", "''"))) table.append_column(schema.Column(name, type_instance, **col_kw)) fk_sets = itertools.groupby(connection.execute(fk, params), lambda row: row.FKEYNAME) for fkeyname, fkey in fk_sets: fkey = list(fkey) if include_columns: key_cols = util.Set([r.COLUMNNAME for r in fkey]) if key_cols != include_columns: continue columns, referants = [], [] quote = self.identifier_preparer._maybe_quote_identifier for row in fkey: columns.append(normalize(row.COLUMNNAME)) if table.schema or not row.in_schema: referants.append('.'.join([ quote(normalize(row[c])) for c in ('REFSCHEMANAME', 'REFTABLENAME', 'REFCOLUMNNAME') ])) else: referants.append('.'.join([ quote(normalize(row[c])) for c in ('REFTABLENAME', 'REFCOLUMNNAME') ])) constraint_kw = {'name': fkeyname.lower()} if fkey[0].RULE is not None: rule = fkey[0].RULE if rule.startswith('DELETE '): rule = rule[7:] constraint_kw['ondelete'] = rule table_kw = {} if table.schema or not row.in_schema: table_kw['schema'] = normalize(fkey[0].REFSCHEMANAME) ref_key = schema._get_table_key(normalize(fkey[0].REFTABLENAME), table_kw.get('schema')) if ref_key not in table.metadata.tables: schema.Table(normalize(fkey[0].REFTABLENAME), table.metadata, autoload=True, autoload_with=connection, **table_kw) constraint = schema.ForeignKeyConstraint(columns, referants, **constraint_kw) table.append_constraint(constraint)
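# Illustrative sketch only (not part of the dialect code): the
# itertools.groupby() pattern used above to walk the FOREIGNKEYCOLUMNS result
# set one constraint at a time.  groupby() only merges *adjacent* items, which
# is why the query orders by FKEYNAME.  The dicts below are hypothetical
# stand-ins for the cursor rows.
import itertools

_rows = [
    {'FKEYNAME': 'FK_ADDR_USER', 'COLUMNNAME': 'USER_ID'},
    {'FKEYNAME': 'FK_ADDR_USER', 'COLUMNNAME': 'USER_REGION'},
    {'FKEYNAME': 'FK_ADDR_ZIP',  'COLUMNNAME': 'ZIP'},
]
for _fkeyname, _fkey in itertools.groupby(_rows, lambda row: row['FKEYNAME']):
    print("%s: %s" % (_fkeyname, [r['COLUMNNAME'] for r in _fkey]))
# FK_ADDR_USER: ['USER_ID', 'USER_REGION']
# FK_ADDR_ZIP: ['ZIP']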
def reflecttable(self, connection, table): # reference: http://dev.mysql.com/doc/refman/5.0/en/name-case-sensitivity.html case_sensitive = int( connection.execute("show variables like 'lower_case_table_names'"). fetchone()[1]) == 0 if not case_sensitive: table.name = table.name.lower() table.metadata.tables[table.name] = table try: c = connection.execute("describe " + table.fullname, {}) except: raise exceptions.NoSuchTableError(table.name) found_table = False while True: row = c.fetchone() if row is None: break #print "row! " + repr(row) if not found_table: found_table = True # these can come back as unicode if use_unicode=1 in the mysql connection (name, type, nullable, primary_key, default) = (str(row[0]), str(row[1]), row[2] == 'YES', row[3] == 'PRI', row[4]) match = re.match(r'(\w+)(\(.*?\))?\s*(\w+)?\s*(\w+)?', type) col_type = match.group(1) args = match.group(2) extra_1 = match.group(3) extra_2 = match.group(4) #print "coltype: " + repr(col_type) + " args: " + repr(args) + "extras:" + repr(extra_1) + ' ' + repr(extra_2) coltype = ischema_names.get(col_type, MSString) kw = {} if extra_1 is not None: kw[extra_1] = True if extra_2 is not None: kw[extra_2] = True if args is not None: if col_type == 'enum': args = args[1:-1] argslist = args.split(',') coltype = coltype(*argslist, **kw) else: argslist = re.findall(r'(\d+)', args) coltype = coltype(*[int(a) for a in argslist], **kw) colargs = [] if default: colargs.append(schema.PassiveDefault(sql.text(default))) table.append_column( schema.Column( name, coltype, *colargs, **dict( primary_key=primary_key, nullable=nullable, ))) tabletype = self.moretableinfo(connection, table=table) table.kwargs['mysql_engine'] = tabletype if not found_table: raise exceptions.NoSuchTableError(table.name)
def reflecttable(self, connection, table): c = connection.execute("PRAGMA table_info(" + table.name + ")", {}) found_table = False while True: row = c.fetchone() if row is None: break #print "row! " + repr(row) found_table = True (name, type, nullable, primary_key) = (row[1], row[2].upper(), not row[3], row[5]) match = re.match(r'(\w+)(\(.*?\))?', type) coltype = match.group(1) args = match.group(2) #print "coltype: " + repr(coltype) + " args: " + repr(args) coltype = pragma_names.get(coltype, SLString) if args is not None: args = re.findall(r'(\d+)', args) #print "args! " +repr(args) coltype = coltype(*[int(a) for a in args]) table.append_item( schema.Column(name, coltype, primary_key=primary_key, nullable=nullable)) if not found_table: raise exceptions.NoSuchTableError(table.name) c = connection.execute("PRAGMA foreign_key_list(" + table.name + ")", {}) while True: row = c.fetchone() if row is None: break (tablename, localcol, remotecol) = (row[2], row[3], row[4]) #print "row! " + repr(row) # look up the table based on the given table's engine, not 'self', # since it could be a ProxyEngine remotetable = schema.Table(tablename, table.metadata, autoload=True, autoload_with=connection) table.c[localcol].append_item( schema.ForeignKey(remotetable.c[remotecol])) # check for UNIQUE indexes c = connection.execute("PRAGMA index_list(" + table.name + ")", {}) unique_indexes = [] while True: row = c.fetchone() if row is None: break if (row[2] == 1): unique_indexes.append(row[1]) # loop thru unique indexes for one that includes the primary key for idx in unique_indexes: c = connection.execute("PRAGMA index_info(" + idx + ")", {}) cols = [] while True: row = c.fetchone() if row is None: break cols.append(row[2]) col = table.columns[row[2]] # unique index that includes the pk is considered a multiple primary key for col in cols: column = table.columns[col] table.columns[col]._set_primary_key()
def reflecttable(self, connection, table): if self.version == 2: ischema_names = pg2_ischema_names else: ischema_names = pg1_ischema_names if self.use_information_schema: ischema.reflecttable(connection, table, ischema_names) else: preparer = self.identifier_preparer if table.schema is not None: schema_where_clause = "n.nspname = :schema" else: schema_where_clause = "pg_catalog.pg_table_is_visible(c.oid)" ## information schema in pg suffers from too many permissions' restrictions ## let us find out at the pg way what is needed... SQL_COLS = """ SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod), (SELECT substring(d.adsrc for 128) FROM pg_catalog.pg_attrdef d WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum AND a.atthasdef) AS DEFAULT, a.attnotnull, a.attnum, a.attrelid as table_oid FROM pg_catalog.pg_attribute a WHERE a.attrelid = ( SELECT c.oid FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE (%s) AND c.relname = :table_name AND c.relkind in ('r','v') ) AND a.attnum > 0 AND NOT a.attisdropped ORDER BY a.attnum """ % schema_where_clause s = sql.text(SQL_COLS) c = connection.execute(s, table_name=table.name, schema=table.schema) rows = c.fetchall() if not rows: raise exceptions.NoSuchTableError(table.name) for name, format_type, default, notnull, attnum, table_oid in rows: ## strip (30) from character varying(30) attype = re.search('([^\(]+)', format_type).group(1) nullable = not notnull try: charlen = re.search('\(([\d,]+)\)', format_type).group(1) except: charlen = False numericprec = False numericscale = False if attype == 'numeric': if charlen is False: numericprec, numericscale = (None, None) else: numericprec, numericscale = charlen.split(',') charlen = False if attype == 'double precision': numericprec, numericscale = (53, False) charlen = False if attype == 'integer': numericprec, numericscale = (32, 0) charlen = False args = [] for a in (charlen, numericprec, numericscale): if a is None: args.append(None) elif a is not False: args.append(int(a)) kwargs = {} if attype == 'timestamp with time zone': kwargs['timezone'] = True elif attype == 'timestamp without time zone': kwargs['timezone'] = False coltype = ischema_names[attype] coltype = coltype(*args, **kwargs) colargs = [] if default is not None: match = re.search(r"""(nextval\(')([^']+)('.*$)""", default) if match is not None: # the default is related to a Sequence sch = table.schema if '.' not in match.group(2) and sch is not None: default = match.group(1) + sch + '.' 
+ match.group( 2) + match.group(3) colargs.append(schema.PassiveDefault(sql.text(default))) table.append_column( schema.Column(name, coltype, nullable=nullable, *colargs)) # Primary keys PK_SQL = """ SELECT attname FROM pg_attribute WHERE attrelid = ( SELECT indexrelid FROM pg_index i WHERE i.indrelid = :table AND i.indisprimary = 't') ORDER BY attnum """ t = sql.text(PK_SQL) c = connection.execute(t, table=table_oid) for row in c.fetchall(): pk = row[0] table.primary_key.add(table.c[pk]) # Foreign keys FK_SQL = """ SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_catalog.pg_constraint r WHERE r.conrelid = :table AND r.contype = 'f' ORDER BY 1 """ t = sql.text(FK_SQL) c = connection.execute(t, table=table_oid) for conname, condef in c.fetchall(): m = re.search( 'FOREIGN KEY \((.*?)\) REFERENCES (?:(.*?)\.)?(.*?)\((.*?)\)', condef).groups() (constrained_columns, referred_schema, referred_table, referred_columns) = m constrained_columns = [ preparer._unquote_identifier(x) for x in re.split(r'\s*,\s*', constrained_columns) ] if referred_schema: referred_schema = preparer._unquote_identifier( referred_schema) referred_table = preparer._unquote_identifier(referred_table) referred_columns = [ preparer._unquote_identifier(x) for x in re.split(r'\s*,\s', referred_columns) ] refspec = [] if referred_schema is not None: schema.Table(referred_table, table.metadata, autoload=True, schema=referred_schema, autoload_with=connection) for column in referred_columns: refspec.append(".".join( [referred_schema, referred_table, column])) else: schema.Table(referred_table, table.metadata, autoload=True, autoload_with=connection) for column in referred_columns: refspec.append(".".join([referred_table, column])) table.append_constraint( schema.ForeignKeyConstraint(constrained_columns, refspec, conname))
def reflecttable(self, connection, table, include_columns):
    c = connection.execute(
        "select distinct OWNER from systables where tabname=?",
        table.name.lower())
    rows = c.fetchall()
    if not rows:
        raise exceptions.NoSuchTableError(table.name)
    else:
        if table.owner is not None:
            if table.owner.lower() in [r[0] for r in rows]:
                owner = table.owner.lower()
            else:
                raise exceptions.AssertionError(
                    "Specified owner %s does not own table %s" %
                    (table.owner, table.name))
        else:
            if len(rows) == 1:
                owner = rows[0][0]
            else:
                raise exceptions.AssertionError(
                    "There are multiple tables with name %s in the schema, you must specify owner" % table.name)

    c = connection.execute(
        """select colname, coltype, collength, t3.default, t1.colno
           from syscolumns as t1, systables as t2, OUTER sysdefaults as t3
           where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=?
             and t3.tabid = t2.tabid and t3.colno = t1.colno
           order by t1.colno""", table.name.lower(), owner)
    rows = c.fetchall()
    if not rows:
        raise exceptions.NoSuchTableError(table.name)

    for name, colattr, collength, default, colno in rows:
        name = name.lower()
        if include_columns and name not in include_columns:
            continue
        # in 7.31, coltype is a packed value: the low byte is the column
        # type and the byte above it is 1 for NOT NULL, 0 for nullable
        nullable, coltype = divmod(colattr, 256)
        if coltype not in (0, 13) and default:
            default = default.split()[-1]

        if coltype == 0 or coltype == 13:   # char, varchar
            coltype = ischema_names.get(coltype, InfoString)(collength)
            if default:
                default = "'%s'" % default
        elif coltype == 5:                  # decimal
            precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF
            if scale == 255:
                scale = 0
            coltype = InfoNumeric(precision, scale)
        else:
            try:
                coltype = ischema_names[coltype]
            except KeyError:
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (coltype, name))
                coltype = sqltypes.NULLTYPE

        colargs = []
        if default is not None:
            colargs.append(schema.PassiveDefault(sql.text(default)))

        table.append_column(
            schema.Column(name, coltype, nullable=(nullable == 0), *colargs))

    # FK
    c = connection.execute(
        """select t1.constrname as cons_name, t1.constrtype as cons_type,
                  t4.colname as local_column, t7.tabname as remote_table,
                  t6.colname as remote_column
           from sysconstraints as t1, systables as t2, sysindexes as t3,
                syscolumns as t4, sysreferences as t5, syscolumns as t6,
                systables as t7, sysconstraints as t8, sysindexes as t9
           where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=?
             and t1.constrtype = 'R'
             and t3.tabid = t2.tabid and t3.idxname = t1.idxname
             and t4.tabid = t2.tabid and t4.colno = t3.part1
             and t5.constrid = t1.constrid and t8.constrid = t5.primary
             and t6.tabid = t5.ptabid and t6.colno = t9.part1
             and t9.idxname = t8.idxname
             and t7.tabid = t5.ptabid""", table.name.lower(), owner)
    rows = c.fetchall()
    fks = {}
    for cons_name, cons_type, local_column, remote_table, remote_column in rows:
        try:
            fk = fks[cons_name]
        except KeyError:
            fk = ([], [])
            fks[cons_name] = fk
        refspec = ".".join([remote_table, remote_column])
        schema.Table(remote_table, table.metadata, autoload=True,
                     autoload_with=connection)
        if local_column not in fk[0]:
            fk[0].append(local_column)
        if refspec not in fk[1]:
            fk[1].append(refspec)
    for name, value in fks.iteritems():
        table.append_constraint(
            schema.ForeignKeyConstraint(value[0], value[1], None))

    # PK
    c = connection.execute(
        """select t1.constrname as cons_name, t1.constrtype as cons_type,
                  t4.colname as local_column
           from sysconstraints as t1, systables as t2, sysindexes as t3,
                syscolumns as t4
           where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=?
             and t1.constrtype = 'P'
             and t3.tabid = t2.tabid and t3.idxname = t1.idxname
             and t4.tabid = t2.tabid and t4.colno = t3.part1""",
        table.name.lower(), owner)
    rows = c.fetchall()
    for cons_name, cons_type, local_column in rows:
        table.primary_key.add(table.c[local_column])
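# Illustrative sketch only (not part of the dialect code): how the packed
# Informix syscolumns values above are decoded.  divmod(colattr, 256) splits
# the NOT NULL flag (the part above the low byte) from the type code, and for
# DECIMAL columns collength packs precision and scale into one integer.  The
# sample values are hypothetical.
_colattr = 256 + 5                      # a NOT NULL column of type code 5 (decimal)
_notnull, _coltype = divmod(_colattr, 256)
assert (_notnull, _coltype) == (1, 5)   # the code later tests "nullable == 0"

_collength = (10 << 8) | 2              # precision 10, scale 2
_precision, _scale = (_collength & 0xFF00) >> 8, _collength & 0xFF
assert (_precision, _scale) == (10, 2)
print((_notnull, _coltype, _precision, _scale))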
def reflecttable(connection, table, ischema_names): key_constraints = pg_key_constraints if table.schema is not None: current_schema = table.schema else: current_schema = connection.default_schema_name() s = select([columns], sql.and_(columns.c.table_name==table.name, columns.c.table_schema==current_schema), order_by=[columns.c.ordinal_position]) c = connection.execute(s) found_table = False while True: row = c.fetchone() if row is None: break #print "row! " + repr(row) # continue found_table = True (name, type, nullable, charlen, numericprec, numericscale, default) = ( row[columns.c.column_name], row[columns.c.data_type], row[columns.c.is_nullable] == 'YES', row[columns.c.character_maximum_length], row[columns.c.numeric_precision], row[columns.c.numeric_scale], row[columns.c.column_default] ) args = [] for a in (charlen, numericprec, numericscale): if a is not None: args.append(a) coltype = ischema_names[type] #print "coltype " + repr(coltype) + " args " + repr(args) coltype = coltype(*args) colargs= [] if default is not None: colargs.append(PassiveDefault(sql.text(default))) table.append_item(schema.Column(name, coltype, nullable=nullable, *colargs)) if not found_table: raise exceptions.NoSuchTableError(table.name) # we are relying on the natural ordering of the constraint_column_usage table to return the referenced columns # in an order that corresponds to the ordinal_position in the key_constraints table, otherwise composite foreign keys # wont reflect properly. dont see a way around this based on whats available from information_schema s = select([constraints.c.constraint_name, constraints.c.constraint_type, constraints.c.table_name, key_constraints], use_labels=True, from_obj=[constraints.join(column_constraints, column_constraints.c.constraint_name==constraints.c.constraint_name).join(key_constraints, key_constraints.c.constraint_name==column_constraints.c.constraint_name)], order_by=[key_constraints.c.ordinal_position]) s.append_column(column_constraints) s.append_whereclause(constraints.c.table_name==table.name) s.append_whereclause(constraints.c.table_schema==current_schema) colmap = [constraints.c.constraint_type, key_constraints.c.column_name, column_constraints.c.table_schema, column_constraints.c.table_name, column_constraints.c.column_name, constraints.c.constraint_name, key_constraints.c.ordinal_position] c = connection.execute(s) fks = {} while True: row = c.fetchone() if row is None: break (type, constrained_column, referred_schema, referred_table, referred_column, constraint_name, ordinal_position) = ( row[colmap[0]], row[colmap[1]], row[colmap[2]], row[colmap[3]], row[colmap[4]], row[colmap[5]], row[colmap[6]] ) #print "type %s on column %s to remote %s.%s.%s" % (type, constrained_column, referred_schema, referred_table, referred_column) if type=='PRIMARY KEY': table.c[constrained_column]._set_primary_key() elif type=='FOREIGN KEY': try: fk = fks[constraint_name] except KeyError: fk = ([],[]) fks[constraint_name] = fk if current_schema == referred_schema: referred_schema = table.schema if referred_schema is not None: refspec = ".".join([referred_schema, referred_table, referred_column]) else: refspec = ".".join([referred_table, referred_column]) if constrained_column not in fk[0]: fk[0].append(constrained_column) if refspec not in fk[1]: fk[1].append(refspec) for name, value in fks.iteritems(): table.append_item(ForeignKeyConstraint(value[0], value[1], name=name))
def reflecttable(self, connection, table):
    preparer = self.identifier_preparer
    if not preparer.should_quote(table):
        name = table.name.upper()
    else:
        name = table.name

    c = connection.execute("select distinct OWNER from ALL_TAB_COLUMNS where TABLE_NAME = :table_name", {'table_name': name})
    rows = c.fetchall()
    if not rows:
        raise exceptions.NoSuchTableError(table.name)
    else:
        if table.owner is not None:
            if table.owner.upper() in [r[0] for r in rows]:
                owner = table.owner.upper()
            else:
                raise exceptions.AssertionError("Specified owner %s does not own table %s" % (table.owner, table.name))
        else:
            if len(rows) == 1:
                owner = rows[0][0]
            else:
                raise exceptions.AssertionError("There are multiple tables with name %s in the schema, you must specify owner" % table.name)

    c = connection.execute("select COLUMN_NAME, DATA_TYPE, DATA_LENGTH, DATA_PRECISION, DATA_SCALE, NULLABLE, DATA_DEFAULT from ALL_TAB_COLUMNS where TABLE_NAME = :table_name and OWNER = :owner", {'table_name': name, 'owner': owner})

    while True:
        row = c.fetchone()
        if row is None:
            break
        found_table = True

        #print "ROW:" , row
        (name, coltype, length, precision, scale, nullable, default) = (
            row[0], row[1], row[2], row[3], row[4], row[5] == 'Y', row[6])

        # INTEGER if the scale is 0 and precision is null
        # NUMBER if the scale and precision are both null
        # NUMBER(9,2) if the precision is 9 and the scale is 2
        # NUMBER(3) if the precision is 3 and scale is 0
        # length is ignored except for CHAR and VARCHAR2
        if coltype == 'NUMBER':
            if precision is None and scale is None:
                coltype = OracleNumeric
            elif precision is None and scale == 0:
                coltype = OracleInteger
            else:
                coltype = OracleNumeric(precision, scale)
        elif coltype == 'CHAR' or coltype == 'VARCHAR2':
            coltype = ischema_names.get(coltype, OracleString)(length)
        else:
            coltype = re.sub(r'\(\d+\)', '', coltype)
            try:
                coltype = ischema_names[coltype]
            except KeyError:
                raise exceptions.AssertionError("Can't get coltype for type '%s'" % coltype)

        colargs = []
        if default is not None:
            colargs.append(schema.PassiveDefault(sql.text(default)))

        # if name comes back as all upper, assume it's case folded
        if (name.upper() == name):
            name = name.lower()

        table.append_column(schema.Column(name, coltype, nullable=nullable, *colargs))

    c = connection.execute(constraintSQL, {'table_name': table.name.upper(), 'owner': owner})
    fks = {}
    while True:
        row = c.fetchone()
        if row is None:
            break
        #print "ROW:" , row
        (cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = row
        if cons_type == 'P':
            table.primary_key.add(table.c[local_column])
        elif cons_type == 'R':
            try:
                fk = fks[cons_name]
            except KeyError:
                fk = ([], [])
                fks[cons_name] = fk
            refspec = ".".join([remote_table, remote_column])
            schema.Table(remote_table, table.metadata, autoload=True, autoload_with=connection, owner=remote_owner)
            if local_column not in fk[0]:
                fk[0].append(local_column)
            if refspec not in fk[1]:
                fk[1].append(refspec)

    for name, value in fks.iteritems():
        table.append_constraint(schema.ForeignKeyConstraint(value[0], value[1], name=name))
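# Illustrative sketch only (not part of the dialect code): the Oracle NUMBER
# handling above restated as a standalone function, with strings standing in
# for the Oracle* type objects.  The (precision, scale) samples mirror the
# comments in the code.
def _oracle_number_sketch(precision, scale):
    if precision is None and scale is None:
        return 'NUMERIC'                        # bare NUMBER
    elif precision is None and scale == 0:
        return 'INTEGER'
    else:
        return 'NUMERIC(%s,%s)' % (precision, scale)

assert _oracle_number_sketch(None, None) == 'NUMERIC'
assert _oracle_number_sketch(None, 0) == 'INTEGER'
assert _oracle_number_sketch(9, 2) == 'NUMERIC(9,2)'
assert _oracle_number_sketch(3, 0) == 'NUMERIC(3,0)'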
def reflecttable(self, connection, table, include_columns): # This is defined in the function, as it relies on win32com constants, # that aren't imported until dbapi method is called if not hasattr(self, 'ischema_names'): self.ischema_names = { const.dbByte: AcBinary, const.dbInteger: AcInteger, const.dbLong: AcInteger, const.dbSingle: AcFloat, const.dbDouble: AcFloat, const.dbDate: AcDateTime, const.dbLongBinary: AcBinary, const.dbMemo: AcText, const.dbBoolean: AcBoolean, const.dbText: AcUnicode, # All Access strings are unicode } # A fresh DAO connection is opened for each reflection # This is necessary, so we get the latest updates dtbs = daoEngine.OpenDatabase(connection.engine.url.database) try: for tbl in dtbs.TableDefs: if tbl.Name.lower() == table.name.lower(): break else: raise exceptions.NoSuchTableError(table.name) for col in tbl.Fields: coltype = self.ischema_names[col.Type] if col.Type == const.dbText: coltype = coltype(col.Size) colargs = \ { 'nullable': not(col.Required or col.Attributes & const.dbAutoIncrField), } default = col.DefaultValue if col.Attributes & const.dbAutoIncrField: colargs['default'] = schema.Sequence(col.Name + '_seq') elif default: if col.Type == const.dbBoolean: default = default == 'Yes' and '1' or '0' colargs['default'] = schema.PassiveDefault( sql.text(default)) table.append_column(schema.Column(col.Name, coltype, **colargs)) # TBD: check constraints # Find primary key columns first for idx in tbl.Indexes: if idx.Primary: for col in idx.Fields: thecol = table.c[col.Name] table.primary_key.add(thecol) if isinstance(thecol.type, AcInteger) and \ not (thecol.default and isinstance(thecol.default.arg, schema.Sequence)): thecol.autoincrement = False # Then add other indexes for idx in tbl.Indexes: if not idx.Primary: if len(idx.Fields) == 1: col = table.c[idx.Fields[0].Name] if not col.primary_key: col.index = True col.unique = idx.Unique else: pass # TBD: multi-column indexes for fk in dtbs.Relations: if fk.ForeignTable != table.name: continue scols = [c.ForeignName for c in fk.Fields] rcols = ['%s.%s' % (fk.Table, c.Name) for c in fk.Fields] table.append_constraint( schema.ForeignKeyConstraint(scols, rcols)) finally: dtbs.Close()
def reflecttable(self, connection, table):
    #TODO: map these better
    column_func = {
        14 : lambda r: sqltypes.String(r['FLEN']),   # TEXT
        7  : lambda r: sqltypes.Integer(),           # SHORT
        8  : lambda r: sqltypes.Integer(),           # LONG
        9  : lambda r: sqltypes.Float(),             # QUAD
        10 : lambda r: sqltypes.Float(),             # FLOAT
        27 : lambda r: sqltypes.Float(),             # DOUBLE
        35 : lambda r: sqltypes.DateTime(),          # TIMESTAMP
        37 : lambda r: sqltypes.String(r['FLEN']),   # VARYING
        261: lambda r: sqltypes.TEXT(),              # BLOB
        40 : lambda r: sqltypes.CHAR(r['FLEN']),     # CSTRING
        12 : lambda r: sqltypes.Date(),              # DATE
        13 : lambda r: sqltypes.Time(),              # TIME
        16 : lambda r: sqltypes.Numeric(precision=r['FPREC'],
                                        length=r['FSCALE'] * -1)  # INT64
        }

    tblqry = """
        SELECT DISTINCT R.RDB$FIELD_NAME AS FNAME,
                        R.RDB$NULL_FLAG AS NULL_FLAG,
                        R.RDB$FIELD_POSITION,
                        F.RDB$FIELD_TYPE AS FTYPE,
                        F.RDB$FIELD_SUB_TYPE AS STYPE,
                        F.RDB$FIELD_LENGTH AS FLEN,
                        F.RDB$FIELD_PRECISION AS FPREC,
                        F.RDB$FIELD_SCALE AS FSCALE
        FROM RDB$RELATION_FIELDS R
             JOIN RDB$FIELDS F ON R.RDB$FIELD_SOURCE=F.RDB$FIELD_NAME
        WHERE F.RDB$SYSTEM_FLAG=0 and R.RDB$RELATION_NAME=?
        ORDER BY R.RDB$FIELD_POSITION"""
    keyqry = """
        SELECT SE.RDB$FIELD_NAME SENAME
        FROM RDB$RELATION_CONSTRAINTS RC
             JOIN RDB$INDEX_SEGMENTS SE ON RC.RDB$INDEX_NAME=SE.RDB$INDEX_NAME
        WHERE RC.RDB$CONSTRAINT_TYPE=? AND RC.RDB$RELATION_NAME=?"""
    fkqry = """
        SELECT RC.RDB$CONSTRAINT_NAME CNAME,
               CSE.RDB$FIELD_NAME FNAME,
               IX2.RDB$RELATION_NAME RNAME,
               SE.RDB$FIELD_NAME SENAME
        FROM RDB$RELATION_CONSTRAINTS RC
             JOIN RDB$INDICES IX1 ON IX1.RDB$INDEX_NAME=RC.RDB$INDEX_NAME
             JOIN RDB$INDICES IX2 ON IX2.RDB$INDEX_NAME=IX1.RDB$FOREIGN_KEY
             JOIN RDB$INDEX_SEGMENTS CSE ON CSE.RDB$INDEX_NAME=IX1.RDB$INDEX_NAME
             JOIN RDB$INDEX_SEGMENTS SE ON SE.RDB$INDEX_NAME=IX2.RDB$INDEX_NAME
                  AND SE.RDB$FIELD_POSITION=CSE.RDB$FIELD_POSITION
        WHERE RC.RDB$CONSTRAINT_TYPE=? AND RC.RDB$RELATION_NAME=?
        ORDER BY SE.RDB$INDEX_NAME, SE.RDB$FIELD_POSITION"""

    # get primary key fields
    c = connection.execute(keyqry, ["PRIMARY KEY", table.name.upper()])
    pkfields = [r['SENAME'] for r in c.fetchall()]

    # get all of the fields for this table
    def lower_if_possible(name):
        # Remove trailing spaces: FB uses a CHAR() type
        # that is padded with spaces
        name = name.rstrip()
        # If it's composed only of upper case chars, use
        # the lowered version, otherwise keep the original
        # (even if stripped...)
        lname = name.lower()
        if lname.upper() == name and not ' ' in name:
            return lname
        return name

    c = connection.execute(tblqry, [table.name.upper()])
    row = c.fetchone()
    if not row:
        raise exceptions.NoSuchTableError(table.name)

    while row:
        name = row['FNAME']
        args = [lower_if_possible(name)]

        kw = {}
        # get the data types and lengths
        args.append(column_func[row['FTYPE']](row))

        # is it a primary key?
        kw['primary_key'] = name in pkfields

        table.append_column(schema.Column(*args, **kw))
        row = c.fetchone()

    # get the foreign keys
    c = connection.execute(fkqry, ["FOREIGN KEY", table.name.upper()])
    fks = {}
    while True:
        row = c.fetchone()
        if not row:
            break

        cname = lower_if_possible(row['CNAME'])
        try:
            fk = fks[cname]
        except KeyError:
            fks[cname] = fk = ([], [])

        rname = lower_if_possible(row['RNAME'])
        schema.Table(rname, table.metadata, autoload=True, autoload_with=connection)
        fname = lower_if_possible(row['FNAME'])
        refspec = rname + '.' + lower_if_possible(row['SENAME'])
        fk[0].append(fname)
        fk[1].append(refspec)

    for name, value in fks.iteritems():
        table.append_constraint(
            schema.ForeignKeyConstraint(value[0], value[1], name=name))
def reflecttable(self, connection, table, include_columns=None):
    """
    Inputs:
      - sqlalchemy.engine.base.Connection object: has a <connection> reference to
        sqlalchemy.pool._ConnectionFairy, which has a <connection> reference to
        sqlalchemy.databases.ibm_db_dbi.Connection, the actual DBAPI driver
        connection handler
      - table object
      - include_columns (a list or set): limits the autoload to the given column names.
    """
    dialect.logger.debug("\n *** IBM_DBDialect::reflecttable( " + str(table) +
                         ', ' + str(include_columns) + ' )')
    ibm_dbi_conn = connection.connection.connection

    schema_name = self.get_default_schema_name(connection)
    if table.schema is not None:
        ibm_dbi_conn.set_current_schema(table.schema)
        schema_name = table.schema

    # Append columns to table
    columns = ibm_dbi_conn.columns(schema_name, table.name, include_columns)
    if not columns:
        raise exceptions.NoSuchTableError(table.name)

    for col in columns:
        (tab_name, col_name, col_id, col_type, def_value, is_nullable,
         col_size, char_len, num_prec, num_scale) = \
            (col['TABLE_NAME'].lower(), col['COLUMN_NAME'].lower(),
             col['ORDINAL_POSITION'], col['TYPE_NAME'], col['COLUMN_DEF'],
             col['IS_NULLABLE'] == 'YES', col['COLUMN_SIZE'],
             col['CHAR_OCTET_LENGTH'], col['NUM_PREC_RADIX'],
             col['DECIMAL_DIGITS'])

        col_args = []
        if def_value is not None:
            col_args.append(schema.PassiveDefault(sql.text(def_value)))

        type = ischema_names.get(col_type, None)
        column = schema.Column(col_name, type, nullable=is_nullable, *col_args)
        dialect.logger.debug("\n *** column: " + repr(column))
        table.append_column(column)

    # Define table's primary keys
    pkeys = ibm_dbi_conn.primary_keys(True, schema_name, table.name)
    for pkey in pkeys:
        (pk_schema, pk_table, pk_column, pk_name, key_seq) = \
            (pkey['TABLE_SCHEM'].lower(), pkey['TABLE_NAME'].lower(),
             pkey['COLUMN_NAME'].lower(), pkey['PK_NAME'].lower(),
             pkey['KEY_SEQ'])
        table.primary_key.add(table.c[pk_column])

    # Define table's other indexes
    indexes = ibm_dbi_conn.indexes(True, schema_name, table.name)
    for idx in indexes:
        (idx_schema, idx_table, idx_col, idx_name, idx_id, idx_type,
         is_unique, ascendent) = \
            (idx['TABLE_SCHEM'].lower(), idx['TABLE_NAME'].lower(),
             idx['COLUMN_NAME'].lower(), idx['INDEX_NAME'].lower(),
             idx['ORDINAL_POSITION'], idx['TYPE'],
             idx['NON_UNIQUE'] == 0, idx['ASC_OR_DESC'] == 'A')
        dialect.logger.debug(
            "\n *** IBM_DBDialect::reflecttable: indexes: " + str(idx))

    # Define table's foreign keys
    fkeys = ibm_dbi_conn.foreign_keys(True, schema_name, table.name)
    for fkey in fkeys:
        (pk_schema, pk_table, pk_column, pk_name, key_seq,
         fk_schema, fk_table, fk_column, fk_name) = \
            (fkey['PKTABLE_SCHEM'].lower(), fkey['PKTABLE_NAME'].lower(),
             fkey['PKCOLUMN_NAME'].lower(), fkey['PK_NAME'].lower(),
             fkey['KEY_SEQ'], fkey['FKTABLE_SCHEM'].lower(),
             fkey['FKTABLE_NAME'].lower(), fkey['FKCOLUMN_NAME'].lower(),
             fkey['FK_NAME'].lower())
        table.append_constraint(
            schema.ForeignKeyConstraint(['%s' % (fk_column)],
                                        ['%s.%s' % (pk_table, pk_column)]))

    dialect.logger.debug("\n *** IBM_DBDialect::reflecttable: table: " + repr(table))
def reflecttable(self, connection, table):
    import sqlalchemy.databases.information_schema as ischema

    # Get base columns
    if table.schema is not None:
        current_schema = table.schema
    else:
        current_schema = self.get_default_schema_name()

    columns = self.uppercase_table(ischema.columns)
    s = sql.select([columns],
                   current_schema
                       and sql.and_(columns.c.table_name == table.name,
                                    columns.c.table_schema == current_schema)
                       or columns.c.table_name == table.name,
                   order_by=[columns.c.ordinal_position])
    c = connection.execute(s)
    found_table = False
    while True:
        row = c.fetchone()
        if row is None:
            break
        found_table = True
        (name, type, nullable, charlen, numericprec, numericscale, default) = (
            row[columns.c.column_name],
            row[columns.c.data_type],
            row[columns.c.is_nullable] == 'YES',
            row[columns.c.character_maximum_length],
            row[columns.c.numeric_precision],
            row[columns.c.numeric_scale],
            row[columns.c.column_default])

        args = []
        for a in (charlen, numericprec, numericscale):
            if a is not None:
                args.append(a)
        coltype = ischema_names[type]
        coltype = coltype(*args)
        colargs = []
        if default is not None:
            colargs.append(schema.PassiveDefault(sql.text(default)))
        table.append_column(
            schema.Column(name, coltype, nullable=nullable, *colargs))

    if not found_table:
        raise exceptions.NoSuchTableError(table.name)

    # We also run an sp_columns to check for identity columns:
    # FIXME: note that this only fetches the existence of an identity column,
    # not its properties like (seed, increment);
    # also, add a check to make sure we specify the schema name of the table
    # cursor = table.engine.execute("sp_columns " + table.name, {})
    cursor = connection.execute("sp_columns " + table.name)
    while True:
        row = cursor.fetchone()
        if row is None:
            break
        col_name, type_name = row[3], row[5]
        if type_name.endswith("identity"):
            ic = table.c[col_name]
            # set up a pseudo-sequence to represent the identity attribute -
            # we interpret this at table.create() time as the identity attribute
            ic.sequence = schema.Sequence(ic.name + '_identity')

    # Add constraints
    RR = self.uppercase_table(ischema.ref_constraints)    # information_schema.referential_constraints
    TC = self.uppercase_table(ischema.constraints)        # information_schema.table_constraints
    C = self.uppercase_table(ischema.column_constraints).alias('C')   # information_schema.constraint_column_usage: the constrained column
    R = self.uppercase_table(ischema.column_constraints).alias('R')   # information_schema.constraint_column_usage: the referenced column

    # Primary key constraints
    s = sql.select([C.c.column_name, TC.c.constraint_type],
                   sql.and_(TC.c.constraint_name == C.c.constraint_name,
                            C.c.table_name == table.name))
    c = connection.execute(s)
    for row in c:
        if 'PRIMARY' in row[TC.c.constraint_type.name]:
            table.primary_key.add(table.c[row[0]])

    # Foreign key constraints
    s = sql.select([C.c.column_name,
                    R.c.table_schema, R.c.table_name, R.c.column_name,
                    RR.c.constraint_name, RR.c.match_option,
                    RR.c.update_rule, RR.c.delete_rule],
                   sql.and_(C.c.table_name == table.name,
                            C.c.constraint_name == RR.c.constraint_name,
                            R.c.constraint_name == RR.c.unique_constraint_name),
                   order_by=[RR.c.constraint_name])
    rows = connection.execute(s).fetchall()

    # group rows by constraint ID, to handle multi-column FKs
    fknm, scols, rcols = (None, [], [])
    for r in rows:
        scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r
        if rfknm != fknm:
            if fknm:
                table.append_constraint(
                    schema.ForeignKeyConstraint(
                        scols, ['%s.%s' % (t, c) for (s, t, c) in rcols], fknm))
            fknm, scols, rcols = (rfknm, [], [])
        if not scol in scols:
            scols.append(scol)
        if not (rschema, rtbl, rcol) in rcols:
            rcols.append((rschema, rtbl, rcol))

    if fknm and scols:
        table.append_constraint(
            schema.ForeignKeyConstraint(
                scols, ['%s.%s' % (t, c) for (s, t, c) in rcols], fknm))
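# Sketch of the foreign-key grouping pattern shared by the implementations
# above: constraint rows arrive one (constrained column, referenced column)
# pair at a time, and multi-column constraints are rebuilt by accumulating
# parallel lists keyed by constraint name before a single ForeignKeyConstraint
# is appended.  The sample rows below are made-up data for illustration only.

def _group_fk_rows_example():
    rows = [
        ('fk_order_cust', 'customer_id', 'customers', 'id'),
        ('fk_order_item', 'item_id',     'items',     'id'),
        ('fk_order_item', 'item_rev',    'items',     'rev'),   # multi-column FK
    ]
    fks = {}
    for cons_name, local_col, remote_table, remote_col in rows:
        fk = fks.setdefault(cons_name, ([], []))
        refspec = '%s.%s' % (remote_table, remote_col)
        if local_col not in fk[0]:
            fk[0].append(local_col)
        if refspec not in fk[1]:
            fk[1].append(refspec)
    # each entry now holds ([constrained columns], [referenced "table.column" specs]),
    # ready for schema.ForeignKeyConstraint(cols, refs, name=cons_name)
    return fks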
def reflecttable(self, connection, table):
    c = connection.execute(
        "select COLUMN_NAME, DATA_TYPE, DATA_LENGTH, DATA_PRECISION, DATA_SCALE, "
        "NULLABLE, DATA_DEFAULT from ALL_TAB_COLUMNS where TABLE_NAME = :table_name",
        {'table_name': table.name.upper()})

    found_table = False
    while True:
        row = c.fetchone()
        if row is None:
            break
        found_table = True
        #print "ROW:", row
        (name, coltype, length, precision, scale, nullable, default) = \
            (row[0], row[1], row[2], row[3], row[4], row[5] == 'Y', row[6])

        # INTEGER if the scale is 0 and precision is null
        # NUMBER if the scale and precision are both null
        # NUMBER(9,2) if the precision is 9 and the scale is 2
        # NUMBER(3) if the precision is 3 and scale is 0
        # length is ignored except for CHAR and VARCHAR2
        if coltype == 'NUMBER':
            if precision is None and scale is None:
                coltype = OracleNumeric
            elif precision is None and scale == 0:
                coltype = OracleInteger
            else:
                coltype = OracleNumeric(precision, scale)
        elif coltype == 'CHAR' or coltype == 'VARCHAR2':
            coltype = ischema_names.get(coltype, OracleString)(length)
        else:
            coltype = ischema_names.get(coltype)

        colargs = []
        if default is not None:
            colargs.append(schema.PassiveDefault(sql.text(default)))

        name = name.lower()
        table.append_item(schema.Column(name, coltype, nullable=nullable, *colargs))

    if not found_table:
        raise exceptions.NoSuchTableError(table.name)

    c = connection.execute(constraintSQL, {'table_name': table.name.upper()})
    while True:
        row = c.fetchone()
        if row is None:
            break
        #print "ROW:", row
        (cons_name, cons_type, local_column, remote_table, remote_column) = row
        if cons_type == 'P':
            table.c[local_column]._set_primary_key()
        elif cons_type == 'R':
            table.c[local_column].append_item(
                schema.ForeignKey(
                    schema.Table(remote_table,
                                 table.metadata,
                                 autoload=True).c[remote_column]))
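# Sketch of the ischema_names lookup that every implementation above relies
# on: a module-level dict maps the type-name string (or type code) returned by
# the catalog query to a SQLAlchemy type class, which is then instantiated
# with whatever length/precision/scale the catalog reported.  The mapping and
# helper below are illustrative only, not any dialect's actual table.

from sqlalchemy import types as sqltypes

_example_ischema_names = {
    'VARCHAR':   sqltypes.String,
    'INTEGER':   sqltypes.Integer,
    'NUMERIC':   sqltypes.Numeric,
    'TIMESTAMP': sqltypes.DateTime,
}

def _coltype_from_catalog_example(type_name, length=None, precision=None, scale=None):
    # fall back to a generic string type for unrecognized names, mirroring
    # the ischema_names.get(coltype, <default>) calls in the dialects above
    type_cls = _example_ischema_names.get(type_name, sqltypes.String)
    if type_cls is sqltypes.Numeric and precision is not None:
        return type_cls(precision, scale)
    if type_cls is sqltypes.String and length is not None:
        return type_cls(length)
    return type_cls()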