def dump(args, tablenames=None):
    """Dump CREATE TABLE / CREATE INDEX DDL for the CREDO schema,
    or for the given tables only.
    """
    # complete schema
    if not tablenames:
        app.log.debug("dumping metadata of all tables in the CREDO schema.")

        for table in metadata.tables.values():
            print(CreateTable(table, on='postgresql', bind=metadata.bind))

            for index in table.indexes:
                print(CreateIndex(index, on='postgresql', bind=metadata.bind))

    # only specific tables
    else:
        # check if the given tables are defined in CREDO
        for tablename in tablenames:
            if tablename not in metadata.tables:
                app.log.fatal(
                    "table {0} is not defined in CREDO".format(tablename))
                app.close()
            else:
                table = metadata.tables[tablename]

                print(CreateTable(table, on='postgresql', bind=metadata.bind))

                for index in table.indexes:
                    print(CreateIndex(index, on='postgresql',
                                      bind=metadata.bind))
def test_index_reflection_filtered_and_clustered(
    self, metadata, connection
):
    """Table with one filtered index and one clustered index, so each
    index will have different dialect_options keys.
    """
    t1 = Table(
        "t",
        metadata,
        Column("id", Integer),
        Column("x", types.String(20)),
        Column("y", types.Integer),
    )
    Index("idx_x", t1.c.x, mssql_clustered=True)
    Index("idx_y", t1.c.y, mssql_where=t1.c.y >= 5)
    metadata.create_all(connection)
    ind = testing.db.dialect.get_indexes(connection, "t", None)

    clustered_index = ""
    for ix in ind:
        if ix["dialect_options"]["mssql_clustered"]:
            clustered_index = ix["name"]

    eq_(clustered_index, "idx_x")

    filtered_indexes = []
    for ix in ind:
        if "dialect_options" in ix:
            if "mssql_where" in ix["dialect_options"]:
                filtered_indexes.append(
                    ix["dialect_options"]["mssql_where"]
                )

    eq_(sorted(filtered_indexes), ["([y]>=(5))"])

    t2 = Table("t", MetaData(), autoload_with=connection)
    clustered_idx = list(
        sorted(t2.indexes, key=lambda clustered_idx: clustered_idx.name)
    )[0]
    filtered_idx = list(
        sorted(t2.indexes, key=lambda filtered_idx: filtered_idx.name)
    )[1]

    self.assert_compile(
        CreateIndex(clustered_idx), "CREATE CLUSTERED INDEX idx_x ON t (x)"
    )
    self.assert_compile(
        CreateIndex(filtered_idx),
        "CREATE NONCLUSTERED INDEX idx_y ON t (y) WHERE ([y]>=(5))",
    )
def test_reflect_fulltext(self, metadata, connection):
    mt = Table(
        "mytable",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("textdata", String(50)),
        mariadb_engine="InnoDB",
        mysql_engine="InnoDB",
    )
    Index(
        "textdata_ix",
        mt.c.textdata,
        mysql_prefix="FULLTEXT",
        mariadb_prefix="FULLTEXT",
    )
    metadata.create_all(connection)

    mt = Table("mytable", MetaData(), autoload_with=testing.db)
    idx = list(mt.indexes)[0]
    eq_(idx.name, "textdata_ix")
    eq_(idx.dialect_options[testing.db.name]["prefix"], "FULLTEXT")

    self.assert_compile(
        CreateIndex(idx),
        "CREATE FULLTEXT INDEX textdata_ix ON mytable (textdata)",
    )
def dump_sql_command(args):
    # type: (argparse.Namespace) -> None
    db_engine = get_db_engine(get_database_url(settings))
    for table in Model.metadata.sorted_tables:
        print(CreateTable(table).compile(db_engine))
        for index in table.indexes:
            print(CreateIndex(index).compile(db_engine))
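# A minimal, self-contained variant of the pattern above: compiling
# CreateTable/CreateIndex against a dialect object rather than a live
# engine, so DDL can be dumped without any database connection. The
# table and index here are illustrative, not from the snippets above.
from sqlalchemy import Column, Index, Integer, MetaData, String, Table
from sqlalchemy.dialects import postgresql
from sqlalchemy.schema import CreateIndex, CreateTable

metadata = MetaData()
example = Table(
    "example",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)
Index("ix_example_name", example.c.name)

dialect = postgresql.dialect()
print(str(CreateTable(example).compile(dialect=dialect)).strip() + ";")
for index in example.indexes:
    print(str(CreateIndex(index).compile(dialect=dialect)).strip() + ";")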
def test_reflect_fulltext_comment(self, metadata, connection):
    mt = Table(
        "mytable",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("textdata", String(50)),
        mysql_engine="InnoDB",
    )
    Index(
        "textdata_ix",
        mt.c.textdata,
        mysql_prefix="FULLTEXT",
        mysql_with_parser="ngram",
    )
    metadata.create_all(connection)

    mt = Table("mytable", MetaData(), autoload_with=connection)
    idx = list(mt.indexes)[0]
    eq_(idx.name, "textdata_ix")
    eq_(idx.dialect_options["mysql"]["prefix"], "FULLTEXT")
    eq_(idx.dialect_options["mysql"]["with_parser"], "ngram")

    self.assert_compile(
        CreateIndex(idx),
        "CREATE FULLTEXT INDEX textdata_ix ON mytable "
        "(textdata) WITH PARSER ngram",
    )
def test_index_reflection_nonclustered(self, metadata, connection):
    """One index is created with mssql_clustered=False; the other is
    created without the mssql_clustered option, so it falls back to the
    NONCLUSTERED default. When reflected back, mssql_clustered=False
    should be included for both.
    """
    t1 = Table(
        "t",
        metadata,
        Column("id", Integer),
        Column("x", types.String(20)),
        Column("y", types.Integer),
    )
    Index("idx_x", t1.c.x, mssql_clustered=False)
    Index("idx_y", t1.c.y)
    metadata.create_all(connection)
    ind = testing.db.dialect.get_indexes(connection, "t", None)

    for ix in ind:
        assert ix["dialect_options"]["mssql_clustered"] is False

    t2 = Table("t", MetaData(), autoload_with=connection)
    idx = list(sorted(t2.indexes, key=lambda idx: idx.name))[0]

    self.assert_compile(
        CreateIndex(idx), "CREATE NONCLUSTERED INDEX idx_x ON t (x)"
    )
def _add_constraint_in_db(
    self,
    constraint: ConstraintOrIndex,
    errors: str = 'raise',
) -> None:
    assert errors in _VALID_ERRORS_OPTIONS
    if self._constraint_already_active(constraint):
        return
    if constraint.table.name not in self._reflected_table_lookup:
        logger.warning(f'Cannot add {constraint.name}, '
                       f'table {constraint.table.name} does not exist')
        return
    with self._db.engine.connect() as conn:
        logger.info(f'Adding {constraint.name}')
        try:
            if isinstance(constraint, Index):
                conn.execute(CreateIndex(constraint))
            else:
                # We add a copy instead of the original constraint.
                # Otherwise, when you later call metadata.create_all
                # to create tables, SQLAlchemy thinks the constraints
                # have already been created and skips them.
                c = copy(constraint)
                conn.execute(AddConstraint(c))
        except SQLAlchemyError:
            if errors == 'raise':
                raise
            elif errors == 'ignore':
                logger.info(f'Unable to add {constraint.name}')
def GetTables(host, database, user, password, schema='public'):
    connection_string = PG_CONN_STR.format(hostName=host, dbName=database,
                                           userName=user, password=password)
    engine = create_engine(connection_string, client_encoding='utf8')
    conn = engine.connect()
    metadata = MetaData(bind=engine, schema=schema)
    metadata.reflect(bind=engine)
    tables = []
    for table in metadata.sorted_tables:
        # create table
        sql = str(CreateTable(table, bind=engine)).strip() + ';\n'
        # comment on table
        comment = conn.execute(
            text(SQL_TABLE_COMMENT.format(TableName=table.name))).fetchall()
        sql += (SQL_COMMENT_ON_TABLE.format(
            Schema=table.schema, TableName=table.name,
            Comment=comment[0][2]) + ';\n') if comment[0][2] else ''
        # create indexes
        sorted_indexes = sorted(table.indexes, key=lambda ind: ind.name)
        for index in sorted_indexes:
            sql += str(CreateIndex(index, bind=engine)) + ';\n'
        # create triggers
        triggers = conn.execute(
            text(SQL_TABLE_TRIGGERS.format(TableName=table.name))).fetchall()
        for trigger in triggers:
            sql += trigger[2] + ';\n'
        tables.append((table.schema, table.name, sql))
    return tables
def test_index_reflection_clustered(self, metadata, connection):
    """When the result of get_indexes() is used to build an index, it
    should include the CLUSTERED keyword when appropriate.
    """
    t1 = Table(
        "t",
        metadata,
        Column("id", Integer),
        Column("x", types.String(20)),
        Column("y", types.Integer),
    )
    Index("idx_x", t1.c.x, mssql_clustered=True)
    Index("idx_y", t1.c.y)
    metadata.create_all(connection)
    ind = testing.db.dialect.get_indexes(connection, "t", None)

    clustered_index = ""
    for ix in ind:
        if ix["dialect_options"]["mssql_clustered"]:
            clustered_index = ix["name"]

    eq_(clustered_index, "idx_x")

    t2 = Table("t", MetaData(), autoload_with=connection)
    idx = list(sorted(t2.indexes, key=lambda idx: idx.name))[0]

    self.assert_compile(
        CreateIndex(idx), "CREATE CLUSTERED INDEX idx_x ON t (x)"
    )
def index_ddl(tables, engine, drop=False):
    output = []
    for table in tables:
        for index in table.indexes:
            if not drop:
                ddl = CreateIndex(index)
            else:
                ddl = DropIndex(index)
            output.append(str(ddl.compile(dialect=engine.dialect)).strip())
            output.append(';\n\n')
    return output
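# A brief usage sketch for the index_ddl() helper above, assuming a
# declared (or reflected) MetaData and an engine; the table and index
# are illustrative. Passing drop=True would emit DROP INDEX instead.
from sqlalchemy import (Column, Index, Integer, MetaData, String, Table,
                        create_engine)

metadata = MetaData()
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("email", String(255)),
)
Index("ix_users_email", users.c.email)

engine = create_engine("sqlite://")
print("".join(index_ddl(metadata.sorted_tables, engine)))
# CREATE INDEX ix_users_email ON users (email);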
def test_indexes_with_filtered(self, metadata, connection):
    t1 = Table(
        "t",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("x", types.String(20)),
        Column("y", types.Integer),
    )
    Index("idx_x", t1.c.x, mssql_where=t1.c.x == "test")
    Index("idx_y", t1.c.y, mssql_where=t1.c.y >= 5)
    metadata.create_all(connection)
    ind = testing.db.dialect.get_indexes(connection, "t", None)

    filtered_indexes = []
    for ix in ind:
        if "dialect_options" in ix:
            filtered_indexes.append(ix["dialect_options"]["mssql_where"])

    eq_(sorted(filtered_indexes), ["([x]='test')", "([y]>=(5))"])

    t2 = Table("t", MetaData(), autoload_with=connection)
    idx = list(sorted(t2.indexes, key=lambda idx: idx.name))[0]
    self.assert_compile(
        CreateIndex(idx), "CREATE INDEX idx_x ON t (x) WHERE ([x]='test')"
    )
def create_index(table: Table):
    creates = {}
    for index in table.indexes:
        index: Index
        stmt = CreateIndex(index).compile(dialect=sqlite.dialect())
        stmt = str(stmt).replace('INDEX', 'INDEX IF NOT EXISTS').strip()
        creates[index.name] = stmt
    return creates
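# A hypothetical usage of the create_index() helper above; the table
# is illustrative, and the helper's own module is assumed to import
# `from sqlalchemy.dialects import sqlite`. Each returned value is a
# ready-to-execute "CREATE INDEX IF NOT EXISTS ..." string keyed by
# index name.
from sqlalchemy import Column, Index, Integer, MetaData, String, Table

metadata = MetaData()
logs = Table(
    "logs",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("level", String(10)),
)
Index("ix_logs_level", logs.c.level)

for name, stmt in create_index(logs).items():
    print(name, "->", stmt)
# ix_logs_level -> CREATE INDEX IF NOT EXISTS ix_logs_level ON logs (level)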
def dump_schema(self):
    # type: () -> str
    db_engine = get_db_engine(self.settings.database)
    sql = StringIO()
    for table in Model.metadata.sorted_tables:
        sql.write(str(CreateTable(table).compile(db_engine)))
        for index in table.indexes:
            sql.write(str(CreateIndex(index).compile(db_engine)))
    return sql.getvalue()
def create_index(self, index):
    # this likely defaults to None if not present, so get()
    # should normally not return the default value.  being
    # defensive in any case
    mssql_include = index.kwargs.get("mssql_include", None) or ()
    for col in mssql_include:
        if col not in index.table.c:
            index.table.append_column(Column(col, sqltypes.NullType))
    self._exec(CreateIndex(index))
def index_ddl(tables, engine, drop=False):
    output = []
    for table in tables:
        indexes = sorted(list(table.indexes), key=lambda k: k.name,
                         reverse=drop)
        for index in indexes:
            if not drop:
                ddl = CreateIndex(index)
            else:
                ddl = DropIndex(index)
            output.append(str(ddl.compile(dialect=engine.dialect)).strip())
            output.append(';\n\n')
    return output
def _assert_impl(self, impl, colnames=None,
                 ddl_contains=None, ddl_not_contains=None,
                 dialect='default'):
    context = op_fixture(dialect=dialect)

    impl._create(context.impl)

    if colnames is None:
        colnames = ['id', 'x', 'y']
    eq_(impl.new_table.c.keys(), colnames)

    pk_cols = [col for col in impl.new_table.c if col.primary_key]
    eq_(list(impl.new_table.primary_key), pk_cols)

    create_stmt = str(
        CreateTable(impl.new_table).compile(dialect=context.dialect))
    create_stmt = re.sub(r'[\n\t]', '', create_stmt)

    idx_stmt = ""
    for idx in impl.new_table.indexes:
        idx_stmt += str(CreateIndex(idx).compile(dialect=context.dialect))
    idx_stmt = re.sub(r'[\n\t]', '', idx_stmt)

    if ddl_contains:
        assert ddl_contains in create_stmt + idx_stmt
    if ddl_not_contains:
        assert ddl_not_contains not in create_stmt + idx_stmt

    expected = [
        create_stmt,
    ]
    if impl.new_table.indexes:
        expected.append(idx_stmt)

    expected.extend([
        'INSERT INTO _alembic_batch_temp (%(colnames)s) '
        'SELECT %(tname_colnames)s FROM tname' % {
            "colnames": ", ".join([
                impl.new_table.c[name].name
                for name in colnames if name in impl.table.c
            ]),
            "tname_colnames": ", ".join(
                "CAST(tname.%s AS %s) AS anon_1" % (
                    name, impl.new_table.c[name].type)
                if (impl.new_table.c[name].type
                    is not impl.table.c[name].type)
                else "tname.%s" % name
                for name in colnames if name in impl.table.c
            )
        },
        'DROP TABLE tname',
        'ALTER TABLE _alembic_batch_temp RENAME TO tname'
    ])
    context.assert_(*expected)
    return impl.new_table
def add_if_not_exists_clause(index, connectable):
    """Add an "IF NOT EXISTS" clause to a CREATE INDEX statement.

    I don't know why, but ``sqlalchemy.Index.create()`` does not take
    a ``checkfirst`` argument like the other schema objects do.
    """
    stmt = str(CreateIndex(index).compile(connectable))
    stmt = stmt.replace('CREATE INDEX', 'CREATE INDEX IF NOT EXISTS', 1)
    ASSERT.in_('IF NOT EXISTS', stmt)
    return stmt
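# A hedged usage sketch for add_if_not_exists_clause() above. The
# table, index, and in-memory engine are illustrative; the helper's
# ASSERT utility is assumed to be importable in its own module.
from sqlalchemy import (Column, Index, Integer, MetaData, String, Table,
                        create_engine)

metadata = MetaData()
items = Table(
    "items",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("sku", String(32)),
)
idx = Index("ix_items_sku", items.c.sku)

engine = create_engine("sqlite://")
stmt = add_if_not_exists_clause(idx, engine)
print(stmt)  # CREATE INDEX IF NOT EXISTS ix_items_sku ON items (sku)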
def handle(self, options, global_options, *args):
    from sqlalchemy.schema import CreateTable, CreateIndex

    engine = get_engine(options, global_options)

    tables = get_sorted_tables(get_tables(
        global_options.apps_dir, tables=args, engine_name=options.engine,
        settings_file=global_options.settings,
        local_settings_file=global_options.local_settings))
    for name, t in tables:
        print("%s;" % str(CreateTable(t)).rstrip())
        for x in t.indexes:
            print("%s;" % CreateIndex(x))
def create_table_indexes_ddl(tableObj, engine, debug_level=-1):
    ddl_text = ''
    indexes = sorted(list(tableObj.indexes), key=lambda k: k.name,
                     reverse=False)
    for index in indexes:
        ddl_obj = CreateIndex(index)
        try:
            ddl_string = str(ddl_obj.compile(dialect=engine.dialect)).strip()
        except Exception:
            # an index without a name cannot be compiled; skip it
            ddl_string = ''
        if ddl_string:
            if not ddl_text:
                ddl_text = ddl_string
            else:
                ddl_text = ddl_text + '\n' + ddl_string
    ddl_string = ddl_text
    if int(debug_level) > 0:
        msg = (f"table [[{tableObj.name}]] "
               f"create_table_indexes_DDL: [{ddl_string}]")
        log_message(msg)
    return ddl_string
def handle(self, options, global_options, *args):
    from sqlalchemy.schema import CreateTable, CreateIndex

    engine = get_engine(options, global_options)

    tables = get_sorted_tables(get_tables(
        global_options.apps_dir, args, engine_name=options.engine,
        settings_file=global_options.settings,
        local_settings_file=global_options.local_settings))
    for name, t in tables:
        if t.__mapping_only__:
            continue
        print("{};".format(
            safe_str(u(CreateTable(t).compile(dialect=engine.dialect)))))
        for x in t.indexes:
            print("{};".format(CreateIndex(x)))
def handle(self, options, global_options, *args):
    from sqlalchemy.schema import CreateTable, CreateIndex

    if not args:
        print("Failed! You should pass one or more table names.")
        sys.exit(1)

    engine = get_engine(options, global_options)

    tables = get_sorted_tables(get_tables(
        global_options.apps_dir, args, engine_name=options.engine,
        settings_file=global_options.settings,
        local_settings_file=global_options.local_settings))
    for name, t in tables:
        print("%s;" % str(CreateTable(t)).rstrip())
        for x in t.indexes:
            print("%s;" % CreateIndex(x))
def populate_table_from_select(self, table, sel):
    if table is None or sel is None:
        raise ValueError("table or select not set")
    print(sql_to_string(table.insert().from_select(sel.columns, sel),
                        self.pretty))
    print(self.result_util.statement_terminator())
    for c in table.columns:
        bc = c.base_columns
        if bc is not None and len(bc) > 0:
            bname = list(bc)[0].name
        else:
            bname = c.name
        if bname.endswith(QDMConstants.PATIENT_ID_COL):
            ixname = 'ix_' + table.name + '_' + QDMConstants.PATIENT_ID_COL
            print(str(CreateIndex(Index(ixname, c), bind=self.db.engine)))
            print(self.result_util.statement_terminator())
    return table
def _visit_constraint(self, constraint):
    constraint.name = self.get_constraint_name(constraint)
    if (isinstance(constraint, UniqueConstraint) and
            is_unique_constraint_with_null_columns_supported(self.dialect)):
        for column in constraint:
            if column.nullable:
                constraint.exclude_nulls = True
                break
    if getattr(constraint, 'exclude_nulls', None):
        index = Index(constraint.name,
                      *(column for column in constraint),
                      unique=True)
        sql = self.process(CreateIndex(index))
        sql += ' EXCLUDE NULL KEYS'
    else:
        sql = self.process(AddConstraint(constraint))
    self.append(sql)
    self.execute()
def print_create_table(tables):
    app.config.from_object('config.default')
    database.init_app(app)
    engine = database.session.get_bind()

    for class_name in tables:
        cls = get_class(class_name)

        for c in cls.__table__.columns:
            if not isinstance(c.type, Enum):
                continue
            t = c.type
            sql = str(CreateEnumType(t).compile(engine))
            click.echo(sql.strip() + ';')

        for index in cls.__table__.indexes:
            sql = str(CreateIndex(index).compile(engine))
            click.echo(sql.strip() + ';')

        sql = str(CreateTable(cls.__table__).compile(engine))
        click.echo(sql.strip() + ';')
def create_table(self):
    if not self.schema:
        self.schema = DB_ETL_SCHEMA
    if not self.create_schema():
        return False
    logger.info('try to create table {} in {}'.format(
        self.sql_table_name, self.schema))
    if self.exist_table():
        return True
    table = self.get_sql_table_object(need_columns=True)
    db_table = self.local_engine.execute(CreateTable(table))
    for index in table.indexes:
        self.local_engine.execute(CreateIndex(index))
    return db_table
async def create(self):
    schema_name = self.kwargs.pop('schema_name', 'public')
    self.engine = await create_engine(**self.kwargs)
    async with self.engine.acquire() as conn:
        for table, index in self.tables:
            exists = await conn.scalar('''SELECT EXISTS (
                SELECT 1
                FROM   information_schema.tables
                WHERE  table_schema = '{table_schema}'
                AND    table_name = '{table_name}'
            );'''.format(table_schema=schema_name, table_name=table.name))
            if not exists:
                tr = await conn.begin()
                create_statement = str(
                    CreateTable(table).compile(self.engine))
                create_statement = create_statement.replace(
                    'CREATE TABLE', 'CREATE TABLE IF NOT EXISTS')
                await conn.execute(create_statement)
                if index is not None:
                    create_index = str(
                        CreateIndex(index).compile(self.engine))
                    await conn.execute(create_index)
                await tr.commit()
def handle(self, options, global_options, *args):
    from sqlalchemy.schema import CreateTable, CreateIndex
    from sqlalchemy import create_engine

    if options.dialect and global_options.verbose:
        print('Create sql with {} dialect'.format(options.dialect))

    engine = get_engine(options, global_options)

    tables = get_sorted_tables(get_tables(
        global_options.apps_dir, tables=args, engine_name=options.engine,
        settings_file=global_options.settings,
        local_settings_file=global_options.local_settings))
    for name, t in tables:
        if t.__mapping_only__:
            continue
        if options.dialect:
            dialect = create_engine('{}://'.format(options.dialect),
                                    strategy="mock",
                                    executor=None).dialect
        else:
            dialect = engine.dialect
        print("{};".format(
            str(CreateTable(t).compile(dialect=dialect)).rstrip()))
        for x in t.indexes:
            print("{};".format(CreateIndex(x)))
def downgrade(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    if migrate_engine.name not in ['mysql', 'postgresql']:
        return

    image_properties = Table('image_properties', meta, autoload=True)
    image_members = Table('image_members', meta, autoload=True)
    images = Table('images', meta, autoload=True)

    if migrate_engine.name == 'postgresql':
        constraint = UniqueConstraint(
            image_properties.c.image_id,
            image_properties.c.name,
            name='ix_image_properties_image_id_name')
        migrate_engine.execute(DropConstraint(constraint))

        constraint = UniqueConstraint(image_properties.c.image_id,
                                      image_properties.c.name)
        migrate_engine.execute(AddConstraint(constraint))

        index = Index('ix_image_properties_image_id_name',
                      image_properties.c.image_id,
                      image_properties.c.name)
        migrate_engine.execute(CreateIndex(index))

        images.c.id.alter(
            server_default=Sequence('images_id_seq').next_value())

    if migrate_engine.name == 'mysql':
        constraint = UniqueConstraint(image_properties.c.image_id,
                                      image_properties.c.name,
                                      name='image_id')
        migrate_engine.execute(AddConstraint(constraint))

    image_members.c.status.alter(nullable=True, server_default=None)
    images.c.protected.alter(nullable=True, server_default=None)
def generate_html(tables, apps, **kwargs):
    from uliweb import orm
    from os.path import dirname, join
    from uliweb.core.template import template_file
    from uliweb.orm import ReferenceProperty
    from uliweb.utils.textconvert import text2html
    from sqlalchemy.schema import CreateIndex

    menus = []
    for app in apps:
        section = {'name': '%s' % app, 'items': []}
        t = get_model_tables(tables, app)
        if not t:
            continue
        for tablename in t:
            item = {
                'app_name': app.replace('.', '_'),
                'name': tablename,
                'caption': tablename,
            }
            try:
                M = orm.get_model(tablename)
            except:
                continue
            item['label'] = getattr(M, '__verbose_name__', tablename)
            if tablename != M.tablename:
                item['caption'] += ' - ' + M.tablename
            section['items'].append(item)
        menus.append(section)

    all_tables = []
    for name, t in sorted(tables.iteritems()):
        model = {
            'name': name,
            'fields': [],
            'relations': [],
            'choices': [],
            'indexes': [],
        }
        if hasattr(t, '__appname__'):
            model['appname'] = text2html(t.__appname__)
        else:
            model['appname'] = None

        M = None
        try:
            M = orm.get_model(name)
        except:
            pass

        if getattr(M, '__verbose_name__', None):
            model['label'] = "%s(%s)" % (
                name, getattr(M, '__verbose_name__', None))
        else:
            model['label'] = name
        if name != getattr(M, 'tablename', name):
            model['label'] += ' - ' + M.tablename

        # Add docstring for Model
        if M.__doc__:
            model['desc'] = M.__doc__
        else:
            model['desc'] = ''

        # process indexes
        for x in t.indexes:
            model['indexes'].append(CreateIndex(x))

        star_index = 0
        for tablefield in sorted(t.c, key=lambda x: (x.name)):
            field = {
                'name': tablefield.name,
                'type': tablefield.type,
                'nullable': tablefield.nullable,
                'primary_key': tablefield.primary_key
            }
            field['reftable'] = None
            field['star'] = False
            field['label'] = "%s" % tablefield.name
            if M:
                ppp = M.properties[tablefield.name]
                if getattr(ppp, 'verbose_name', None):
                    field['label'] = "%s" % getattr(ppp, 'verbose_name', None)
                if getattr(ppp, 'choices', None):
                    choices_list = getattr(ppp, 'choices', None)
                    if callable(choices_list):
                        choices_list = choices_list()
                    if choices_list:
                        star_index = star_index + 1
                        model['choices'].append({
                            'index': star_index,
                            'fieldlabel': field['label'],
                            'fieldname': field['name'],
                            'list': choices_list
                        })
                        field['star'] = star_index
                if ppp and ppp.__class__ is ReferenceProperty:
                    field['reftable'] = "%s" % ppp.reference_class.tablename
            model['fields'].append(field)
        all_tables.append(model)

    database = {}
    database["menus"] = menus
    database["tables"] = all_tables
    return template_file(join(dirname(__file__),
                              "templates/docindex.html"), database)
def add_index(self, index):
    add_index = CreateIndex(index, bind=self._dbengine)
    add_index.execute()
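# Note: the bind= argument and .execute() on DDL elements, as used in
# add_index() above, were deprecated in SQLAlchemy 1.4 and removed in
# 2.0. A minimal sketch of the modern equivalent (the function name
# and the engine parameter are illustrative):
from sqlalchemy.schema import CreateIndex

def add_index_modern(engine, index):
    # run the DDL on an explicit connection inside a transaction
    with engine.begin() as conn:
        conn.execute(CreateIndex(index))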
def _assert_impl(self, impl, colnames=None,
                 ddl_contains=None, ddl_not_contains=None,
                 dialect='default', schema=None):
    context = op_fixture(dialect=dialect)

    impl._create(context.impl)

    if colnames is None:
        colnames = ['id', 'x', 'y']
    eq_(impl.new_table.c.keys(), colnames)

    pk_cols = [col for col in impl.new_table.c if col.primary_key]
    eq_(list(impl.new_table.primary_key), pk_cols)

    create_stmt = str(
        CreateTable(impl.new_table).compile(dialect=context.dialect))
    create_stmt = re.sub(r'[\n\t]', '', create_stmt)

    idx_stmt = ""

    for idx in impl.indexes.values():
        idx_stmt += str(CreateIndex(idx).compile(dialect=context.dialect))

    for idx in impl.new_indexes.values():
        impl.new_table.name = impl.table.name
        idx_stmt += str(CreateIndex(idx).compile(dialect=context.dialect))
        impl.new_table.name = '_alembic_batch_temp'

    idx_stmt = re.sub(r'[\n\t]', '', idx_stmt)

    if ddl_contains:
        assert ddl_contains in create_stmt + idx_stmt
    if ddl_not_contains:
        assert ddl_not_contains not in create_stmt + idx_stmt

    expected = [
        create_stmt,
    ]

    if schema:
        args = {"schema": "%s." % schema}
    else:
        args = {"schema": ""}

    args['colnames'] = ", ".join([
        impl.new_table.c[name].name
        for name in colnames if name in impl.table.c
    ])

    args['tname_colnames'] = ", ".join(
        "CAST(%(schema)stname.%(name)s AS %(type)s) AS anon_1" % {
            'schema': args['schema'],
            'name': name,
            'type': impl.new_table.c[name].type
        }
        if (impl.new_table.c[name].type._type_affinity
            is not impl.table.c[name].type._type_affinity)
        else "%(schema)stname.%(name)s" % {
            'schema': args['schema'], 'name': name
        }
        for name in colnames if name in impl.table.c
    )

    expected.extend([
        'INSERT INTO %(schema)s_alembic_batch_temp (%(colnames)s) '
        'SELECT %(tname_colnames)s FROM %(schema)stname' % args,
        'DROP TABLE %(schema)stname' % args,
        'ALTER TABLE %(schema)s_alembic_batch_temp '
        'RENAME TO %(schema)stname' % args
    ])
    if idx_stmt:
        expected.append(idx_stmt)
    context.assert_(*expected)
    return impl.new_table
def create_index(self, index):
    mssql_include = index.kwargs.get('mssql_include', ())
    for col in mssql_include:
        if col not in index.table.c:
            index.table.append_column(Column(col, sqltypes.NullType))
    self._exec(CreateIndex(index))
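# A sketch of what the create_index() hooks above prepare for: an
# Index declared with the mssql_include dialect option compiles to a
# CREATE INDEX ... INCLUDE (...) statement on the MSSQL dialect. The
# table and column names are illustrative; the exact rendering (for
# example, whether NONCLUSTERED is emitted) varies by SQLAlchemy
# version.
from sqlalchemy import Column, Index, Integer, MetaData, Table
from sqlalchemy.dialects import mssql
from sqlalchemy.schema import CreateIndex

metadata = MetaData()
t = Table("t", metadata, Column("x", Integer), Column("y", Integer))
idx = Index("idx_x", t.c.x, mssql_include=["y"])
print(CreateIndex(idx).compile(dialect=mssql.dialect()))
# e.g. CREATE INDEX idx_x ON t (x) INCLUDE (y)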