def table_ddl(tables, engine, drop=False):
    """Render DDL for *tables* against *engine*'s dialect.

    :param tables: iterable of SQLAlchemy Table objects
    :param engine: engine whose dialect compiles the DDL
    :param drop: emit DROP TABLE instead of CREATE TABLE when True
    :return: list alternating compiled DDL strings and ';\n\n' separators
    """
    statements = []
    for tbl in tables:
        ddl = DropTable(tbl) if drop else CreateTable(tbl)
        statements.append(str(ddl.compile(dialect=engine.dialect)).strip())
        statements.append(';\n\n')
    return statements
def createsql(app, out):
    """Write CREATE TABLE and CREATE INDEX DDL for every model table to *out*."""
    metadata = Base.metadata
    db = database.get_database(app)
    metadata.bind = db
    for table in metadata.sorted_tables:
        statement = CreateTable(table).compile(db.engine).string.strip() + ';\n'
        # Indexes are emitted in name order so output is deterministic.
        for index in sorted(table.indexes, key=lambda idx: idx.name):
            statement += CreateIndex(index).compile(db.engine).string + ';\n'
        statement += '\n'
        out.write(statement)
def get_pool():
    """Create an engine pool, create any missing mapped tables, return the pool.

    Old-style (``yield from``) asyncio coroutine; the temporary connection used
    for the schema check is always released.
    """
    pool = yield from create_engine(dsn)
    connection = yield from pool.acquire()
    try:
        result = yield from connection.execute(
            'SELECT tablename FROM pg_tables '
            'WHERE schemaname=%s', ('public', ))
        existing_table_names = {row[0] for row in result}
        print('Existing tables:', existing_table_names)
        for table_name, table in tables.metadata.tables.items():
            if table_name not in existing_table_names:
                create_statement = CreateTable(table)
                print(create_statement.compile(dialect=dialect))
                yield from connection.execute(create_statement)
    finally:
        connection.close()
    return pool
def create_sql(tables=None, out=sys.stdout):
    u'''Write CREATE TABLE / CREATE INDEX DDL for model tables to *out*.

    :param tables [str]: optional table names to restrict output to; all
        tables when empty/None
    :param out: file obj
    '''
    metadata = BaseModel.metadata
    metadata.bind = db.session(BaseModel.default_session_name).get_bind()
    all_tables = metadata.sorted_tables
    if not tables:
        target_tables = all_tables
    else:
        target_tables = [t for t in all_tables if t.name in tables]
        # Every requested name must match a known table.
        if len(tables) != len(target_tables):
            raise ValueError('invalid tables. {}'.format(tables))
    for table in sorted(target_tables, key=lambda t: t.name):
        statement = CreateTable(table).compile(db.engine).string.strip() + ';\n'
        for index in sorted(table.indexes, key=lambda idx: idx.name):
            statement += CreateIndex(index).compile(db.engine).string + ';\n'
        statement += '\n'
        out.write(statement)
def main():
    """Build a MySQL CREATE TABLE from a "Field,Max Length" CSV and write it out."""
    table_name = 'import'
    argc = len(sys.argv)
    if argc == 1:
        input_, output_ = fileinput.input(), sys.stdout
    elif argc == 2:
        input_, output_ = open(sys.argv[1]), sys.stdout
    elif argc == 3:
        # NOTE(review): the table is named after the output path here — looks
        # intentional, but confirm that is the desired table name.
        table_name = sys.argv[2]
        input_, output_ = open(sys.argv[1]), open(sys.argv[2], 'w+')
    else:
        print("Usage: ./create-csv-table.py [input] [output]")
        sys.exit(1)

    reader = csv.reader(input_)
    next(reader)  # discard "Field,Max Length"
    # +1 leaves headroom over the reported maximum length.
    columns = [Column(row[0], String(int(row[1]) + 1)) for row in reader]
    table = CreateTable(Table(table_name, MetaData(), *columns))
    output_.write(str(table.compile(dialect=mysql.dialect())))
def go():
    """Drop then recreate ``tbl``; DDL results expose no rows and the table
    is empty and queryable afterwards."""
    conn = yield from self.connect()
    result = yield from conn.execute(DropTable(tbl))
    # DDL statements return no result rows.
    with self.assertRaises(sa.ResourceClosedError):
        yield from result.fetchmany()
    # The dropped table must no longer be selectable.
    with self.assertRaises(psycopg2.ProgrammingError):
        yield from conn.execute("SELECT * FROM sa_tbl")
    result = yield from conn.execute(CreateTable(tbl))
    with self.assertRaises(sa.ResourceClosedError):
        yield from result.fetchmany()
    result = yield from conn.execute("SELECT * FROM sa_tbl")
    self.assertEqual(0, len(list(result)))
async def test_transaction_commit(engine, mytable):
    """A row inserted inside an explicit transaction is visible before and
    after the commit."""
    async with engine.connect() as conn:
        transaction = await conn.begin()
        await conn.execute(CreateTable(mytable))
        await conn.execute(mytable.insert())
        result = await conn.execute(mytable.select())
        assert len(await result.fetchall()) == 1
        await transaction.commit()
        result = await conn.execute(mytable.select())
        assert len(await result.fetchall()) == 1
def test_create_table_with_unicode_sortkey(self, compiler):
    """A unicode ``redshift_sortkey`` renders a SORTKEY clause after the
    column list."""
    table = Table('t1', MetaData(),
                  Column('id', Integer, primary_key=True),
                  Column('name', String),
                  redshift_sortkey=u"id")
    actual = compiler.process(CreateTable(table))
    expected = (u"\nCREATE TABLE t1 ("
                u"\n\tid INTEGER NOT NULL, "
                u"\n\tname VARCHAR, "
                u"\n\tPRIMARY KEY (id)\n) "
                u"SORTKEY (id)\n\n")
    assert expected == actual, self._compare_strings(expected, actual)
def test_create_table(connect):
    """Drop then recreate ``tbl``; DDL results expose no rows and the table
    is empty and queryable afterwards."""
    conn = yield from connect()
    result = yield from conn.execute(DropTable(tbl))
    # DDL statements return no result rows.
    with pytest.raises(sa.ResourceClosedError):
        yield from result.fetchmany()
    # The dropped table must no longer be selectable.
    with pytest.raises(psycopg2.ProgrammingError):
        yield from conn.execute("SELECT * FROM sa_tbl")
    result = yield from conn.execute(CreateTable(tbl))
    with pytest.raises(sa.ResourceClosedError):
        yield from result.fetchmany()
    result = yield from conn.execute("SELECT * FROM sa_tbl")
    assert 0 == len(list(result))
def main(url, schema=None, apply=False, alphabetical=False, quiet=False): """ Print or apply a reflected or loaded database schema. """ # output from cargo.log import ( get_logger, enable_default_logging, ) enable_default_logging() # build the particular database engine from cargo.sql.alchemy import make_engine engine = make_engine(url) # load the appropriate schema if schema is None: # examine the database to construct a schema from sqlalchemy.schema import MetaData metadata = MetaData(bind=engine.connect(), reflect=True) else: # load an already-defined schema from cargo.sugar import value_by_name metadata = value_by_name(schema) # print or apply the schema if apply: if not quiet: get_logger("sqlalchemy.engine", level="DEBUG") metadata.create_all(engine) else: # print the DDL from sqlalchemy.schema import CreateTable if alphabetical: sorted_tables = sorted(metadata.sorted_tables, key=lambda t: t.name) else: sorted_tables = metadata.sorted_tables for table in sorted_tables: print CreateTable(table).compile(engine)
def table_creates(table_name=None, column_names=None):
    """Print CREATE TABLE DDL for a table of Text columns under several dialects.

    :param table_name: name of the table to generate
    :param column_names: iterable of column names (each becomes a Text column)
    """
    metadata = MetaData()
    columns = [Column('{}'.format(c), Text) for c in column_names]
    table = Table(table_name, metadata, *columns)
    tables = [table]

    # sqlalchemy engines
    d_ename_extension = {
        'mysql+pyodbc://./MyDb': {
            'extension': '_mssql.sql'
        },
        # comment out some for now to declutter
        #'sqlite:///:memory:': {'extension': '_sqlite.sql'},
        #'postgresql://': {'extension':'_postgresql.sql'},
        #'oracle+cx_oracle://': {'extension':'_oracle.sql'},
        'mssql+pyodbc://': {
            'extension': '_mssql.sql'
        },
    }

    def _make_mock_engine(url):
        # Each call gets its own closure cell, so the executor compiles with
        # the engine it belongs to.  The original version created the lambda
        # inside the loop, where the late-bound loop variable made EVERY
        # executor use the last engine's dialect.
        # https://stackoverflow.com/questions/870925/how-to-generate-a-file-with-ddl-in-the-engines-sql-dialect-in-sqlalchemy
        engine = create_engine(
            url,
            strategy='mock',
            executor=lambda sql, *multiparams, **params: print(
                sql.compile(dialect=engine.dialect)))
        return engine

    engines = [_make_mock_engine(engine_name)
               for engine_name in d_ename_extension]

    for table in tables:
        print(
            '\n-----------------TABLE {}----------------------------\n'.format(
                table.name))
        # zip keeps each engine paired with the name it was created from.
        for engine_name, engine in zip(d_ename_extension, engines):
            print('-----------------ENGINE {}--------------------------\n'.
                  format(engine_name))
            print(CreateTable(table).compile(engine))
            print('======================================')
    return
async def initialise_db(app, loop):
    """Create the global engine, the table, and seed it with one row."""
    global engine
    engine = await create_engine(
        host='db',
        port=3306,
        user='******',
        password='******',
        db='db',
        loop=loop,
    )
    async with engine.acquire() as conn:
        await conn.execute(CreateTable(table))
        seed = table.insert().values(endpoint='vlk', url='www.vlk.cz')
        await conn.execute(seed)
def test_create_table_with_identity(self, compiler):
    """``redshift_identity=[seed, step]`` renders IDENTITY(seed,step) on the
    column."""
    table = Table(
        't1', MetaData(),
        Column('id', Integer, primary_key=True, redshift_identity=[1, 2]),
        Column('name', String),
    )
    actual = compiler.process(CreateTable(table))
    expected = (u"\nCREATE TABLE t1 ("
                u"\n\tid INTEGER IDENTITY(1,2) NOT NULL, "
                u"\n\tname VARCHAR, "
                u"\n\tPRIMARY KEY (id)\n)\n\n")
    assert expected == actual, self._compare_strings(expected, actual)
def test_create_table_with_diststyle(self, compiler):
    """``redshift_diststyle`` renders a DISTSTYLE clause after the column
    list."""
    table = Table('t1', MetaData(),
                  Column('id', Integer, primary_key=True),
                  Column('name', String),
                  redshift_diststyle="EVEN")
    actual = compiler.process(CreateTable(table))
    expected = (u"\nCREATE TABLE t1 ("
                u"\n\tid INTEGER NOT NULL, "
                u"\n\tname VARCHAR, "
                u"\n\tPRIMARY KEY (id)\n) "
                u"DISTSTYLE EVEN\n\n")
    assert expected == actual, self._compare_strings(expected, actual)
async def test_transaction_rollback(asyncio_engine, mytable):
    """Rolling back a transaction discards the row inserted inside it."""
    async with asyncio_engine.connect() as conn:
        await conn.execute(CreateTable(mytable))
        transaction = await conn.begin()
        await conn.execute(mytable.insert())
        result = await conn.execute(mytable.select())
        assert len(await result.fetchall()) == 1
        await transaction.rollback()
        result = await conn.execute(mytable.select())
        assert len(await result.fetchall()) == 0
def test_create_table(connect):
    """Drop then recreate ``tbl``; DDL results expose no rows and the table
    is empty and queryable afterwards."""
    conn = yield from connect()
    result = yield from conn.execute(DropTable(tbl))
    # DDL statements return no result rows.
    with pytest.raises(sa.ResourceClosedError):
        yield from result.fetchmany()
    # The dropped table must no longer be selectable.
    with pytest.raises(aiosqlite3.OperationalError):
        yield from conn.execute("SELECT * FROM sa_tbl")
    result = yield from conn.execute(CreateTable(tbl))
    with pytest.raises(sa.ResourceClosedError):
        yield from result.fetchmany()
    result = yield from conn.execute("SELECT * FROM sa_tbl")
    rows = yield from async_res_list(result)
    assert 0 == len(rows)
async def init_db(app):
    """Create the application's tables (best effort) and stash the engine on
    *app*."""
    engine = create_engine(
        # In-memory sqlite database cannot be accessed from different
        # threads, use file.
        app['config']['dsn'],
        strategy=ASYNCIO_STRATEGY,
    )
    async with engine.connect() as conn:
        # TODO: check that DB doesn't exist before creating it
        for table in (user, group, group_org, event, attendee):
            try:
                await conn.execute(CreateTable(table))
            except exc.OperationalError:
                # Most likely the table already exists; keep going.
                pass
    app['engine'] = engine
def get_table_ddl(table: Table, dialect_name: str = SqlaDialectName.MYSQL) -> str:
    """
    Returns the DDL (data definition language; SQL ``CREATE TABLE`` commands)
    for a specific table.

    Args:
        table: Table to dump.
        dialect_name: SQLAlchemy dialect name.

    https://stackoverflow.com/questions/2128717/sqlalchemy-printing-raw-sql-from-create
    """  # noqa
    target_dialect = get_dialect_from_name(dialect_name)
    compiled = CreateTable(table).compile(dialect=target_dialect)
    return str(compiled)
def test_create_column_skip(self):
    """A ``CreateColumn`` compiler hook returning None omits that column
    from the rendered DDL."""
    @compiles(CreateColumn)
    def skip_xmin(element, compiler, **kw):
        # Suppress the 'xmin' column; render everything else normally.
        if element.element.name != 'xmin':
            return compiler.visit_create_column(element, **kw)
        return None

    table = Table('t', MetaData(),
                  Column('a', Integer),
                  Column('xmin', Integer),
                  Column('c', Integer))

    self.assert_compile(
        CreateTable(table),
        "CREATE TABLE t (a INTEGER, c INTEGER)"
    )
def generate_sql_by_table(table_codes=None):
    """Return CREATE TABLE statements (joined by blank lines) for the given
    table codes.

    :param table_codes: table codes to generate; defaults to every code in
        ``SF1_FILE_SEGMENTS`` after the first segment
    :return: all statements joined with "\n\n", each terminated by ';'
    """
    if table_codes is None:
        table_codes = []
        for f in SF1_FILE_SEGMENTS[1:]:
            table_codes.extend(f)
    # The original assigned `statements = []` twice; the first was dead code.
    statements = []
    for table_code in table_codes:
        sql_table = _create_base_table(table_code)
        _add_sql_columns_for_table(sql_table, table_code)
        statements.append(
            unicode(CreateTable(sql_table).compile(dialect=None)).strip() +
            ';')
    return "\n\n".join(statements)
def createTableFromTable(name, tableNam, engine):
    """Copy the structure of an existing table and create a new table from it.

    :param name: name of the new table to create
    :param tableNam: name of the existing table to copy
    :param engine: SQLAlchemy engine bound to the target database
    """
    # Reflect the current database so the source table is available.
    Base.metadata.reflect(engine)
    # Fetch the source table object.
    table = Base.metadata.tables[tableNam]
    # Render its CREATE TABLE statement.
    ddl = str(CreateTable(table))
    # Swap in the new table name (and make creation idempotent).
    ddl = ddl.replace("CREATE TABLE " + tableNam,
                      "CREATE TABLE if not exists " + name)
    db_conn = engine.connect()
    try:
        db_conn.execute(ddl)
    finally:
        # Always release the connection, even if execution fails
        # (the original leaked it on error).
        db_conn.close()
    Base.metadata.clear()
def handle(self, options, global_options, *args): from sqlalchemy.schema import CreateTable, CreateIndex if not args: print "Failed! You should pass one or more tables name." sys.exit(1) engine = get_engine(options, global_options) tables = get_sorted_tables(get_tables(global_options.apps_dir, args, engine_name=options.engine, settings_file=global_options.settings, local_settings_file=global_options.local_settings)) for name, t in tables: print "%s;" % str(CreateTable(t)).rstrip() for x in t.indexes: print "%s;" % CreateIndex(x)
def test_on_null(self):
    """GENERATED BY DEFAULT AS IDENTITY is rendered; the ON NULL modifier
    appears only on Oracle."""
    table = Table(
        "foo_table",
        MetaData(),
        Column(
            "foo",
            Integer(),
            Identity(always=False, on_null=True, start=42, order=True),
        ),
    )
    on_null_sql = " ON NULL" if testing.against("oracle") else ""
    self.assert_compile(
        CreateTable(table),
        ("CREATE TABLE foo_table (foo INTEGER GENERATED BY DEFAULT" +
         on_null_sql + " AS IDENTITY (START WITH 42 ORDER))"),
    )
def test_create_column_with_encoding(self, compiler):
    """``info=dict(encode=...)`` renders an ENCODE clause on the column."""
    table = Table('t1', MetaData(),
                  Column('id', Integer, primary_key=True,
                         info=dict(encode="LZO")),
                  Column('name', String))
    actual = compiler.process(CreateTable(table))
    expected = (u"\nCREATE TABLE t1 ("
                u"\n\tid INTEGER ENCODE LZO NOT NULL, "
                u"\n\tname VARCHAR, "
                u"\n\tPRIMARY KEY (id)\n)\n\n")
    assert expected == actual, self._compare_strings(expected, actual)
async def test_implicit_transaction_success(trio_engine, mytable):
    """``engine.begin()`` commits automatically when its block exits cleanly."""
    if ':memory:' in str(trio_engine.sync_engine.url):
        pytest.skip(":memory: connections don't persist across threads")

    async with trio_engine.begin() as conn:
        assert isinstance(conn, AsyncConnection)
        await conn.execute(CreateTable(mytable))
        await conn.execute(mytable.insert())
        result = await conn.execute(mytable.select())
        assert len(await result.fetchall()) == 1

    # Transaction should have been committed automatically
    result = await trio_engine.execute(mytable.select())
    assert len(await result.fetchall()) == 1
def connect(self, **kwargs):
    """Build a test engine and reset the ``simple_enum`` type and ``tbl``
    table to a clean state; return the engine."""
    engine = yield from sa.create_engine(database='aiopg',
                                         user='******',
                                         password='******',
                                         host='127.0.0.1',
                                         loop=self.loop,
                                         **kwargs)
    with (yield from engine) as conn:
        try:
            yield from conn.execute(DropTable(tbl))
        except psycopg2.ProgrammingError:
            # Table did not exist yet; nothing to drop.
            pass
        yield from conn.execute("DROP TYPE IF EXISTS simple_enum;")
        yield from conn.execute("""CREATE TYPE simple_enum AS ENUM ('first', 'second');""")
        yield from conn.execute(CreateTable(tbl))
    return engine
async def test_transaction_context_manager_failure(asyncio_engine, mytable):
    """An exception raised inside ``conn.begin()`` rolls the transaction
    back."""
    async with asyncio_engine.connect() as conn:
        await conn.execute(CreateTable(mytable))

        with pytest.raises(RuntimeError):
            async with conn.begin() as trans:
                await conn.execute(mytable.insert())
                result = await conn.execute(mytable.select())
                assert len(await result.fetchall()) == 1
                raise RuntimeError

        # The insert above must have been rolled back.
        result = await conn.execute(mytable.select())
        assert len(await result.fetchall()) == 0
async def test_begin_nested(asyncio_engine, mytable):
    """Rolling back a nested transaction keeps the outer transaction's
    insert."""
    async with asyncio_engine.connect() as conn:
        await conn.execute(CreateTable(mytable))

        async with conn.begin() as outer:
            await conn.execute(mytable.insert())
            async with conn.begin_nested() as inner:
                assert isinstance(inner, AsyncTransaction)
                await conn.execute(mytable.insert())
                await inner.rollback()
            await outer.commit()

        # Only the outer transaction's row survives.
        result = await conn.execute(mytable.select())
        assert len(await result.fetchall()) == 1
def test_create_table_all_together(self, compiler):
    """DISTSTYLE, DISTKEY and SORTKEY render together after the column
    list."""
    table = Table('t1', MetaData(),
                  Column('id', Integer, primary_key=True),
                  Column('name', String),
                  redshift_diststyle="KEY",
                  redshift_distkey="id",
                  redshift_sortkey=["id", "name"])
    actual = compiler.process(CreateTable(table))
    expected = (u"\nCREATE TABLE t1 ("
                u"\n\tid INTEGER NOT NULL, "
                u"\n\tname VARCHAR, "
                u"\n\tPRIMARY KEY (id)\n) "
                u"DISTSTYLE KEY DISTKEY (id) SORTKEY (id, name)\n\n")
    assert expected == actual, self._compare_strings(expected, actual)
def main():
    """Create ``sample_table`` in the example database (idempotently)."""
    DB = 'postgresql:///example'
    TABLE_SPEC = [('id', BigInteger),
                  ('name', String),
                  ('t_modified', DateTime),
                  ('whatever', String)]
    TABLE_NAME = 'sample_table'

    columns = [Column(col_name, col_type) for col_name, col_type in TABLE_SPEC]
    table = Table(TABLE_NAME, MetaData(), *columns)
    with Session(DB, echo=True) as s:
        # this is just here to make the script idempotent
        s.execute('drop table if exists {}'.format(TABLE_NAME))
        s.execute(CreateTable(table))
async def init_db(app: Sanic, db_prefix=''):
    """Create the application's database and all model tables.

    :param app: Sanic app whose config supplies the DB credentials
    :param db_prefix: optional suffix appended (with '_') to the DB name,
        e.g. for test databases
    """
    db_creds = {
        'user': app.config.DB_USER,
        'password': app.config.DB_PASSWORD,
        'host': app.config.DB_HOST,
        'port': app.config.DB_PORT,
        # Connect to the maintenance DB to issue CREATE DATABASE.
        'database': 'postgres'
    }
    conn = await asyncpg.connect(**db_creds)
    query = f"CREATE DATABASE {app.config.DB_NAME}{'_' if db_prefix else ''}{db_prefix} OWNER {db_creds['user']};"
    await conn.fetchrow(query)
    await conn.close()

    pool = await get_pool()
    async with pool.acquire() as conn:
        for table in models:
            create_expr = CreateTable(table)
            # asyncpg's Connection.execute expects a SQL string; the raw
            # CreateTable element must be rendered with str() first.
            await conn.execute(str(create_expr))
def save_model(models):
    """Back up the current ``src/schema.ts`` and regenerate it from *models*.

    Writes the client-side schema as JSON, the SQLite DDL as a template
    string, and a fresh random version id.  Errors are reported (with a
    traceback) rather than raised.
    """
    # Tool may be run from the repo root or from tools/.
    prefix = '../' if 'tools' in realpath('.') else ''
    try:
        if isfile(prefix + 'src/schema.ts'):
            # Rotate the existing file to a dated .bkp, numbering duplicates.
            nameStr = prefix + 'src/schema-' + datetime.now().isoformat()[0:10]
            if isfile(nameStr + '.bkp'):
                i = 1
                while isfile(nameStr + '(' + str(i) + ')' + '.bkp'):
                    i += 1
                nameStr = nameStr + '(' + str(i) + ')'
            rename(prefix + 'src/schema.ts', nameStr + '.bkp')
        clientModel = client_only_model(models)
        with open(prefix + 'src/schema.ts', 'w+') as f:
            f.write('''/**
 * F2B Schema
 *
 * This file is automatically generated. DO NOT MAKE CHANGES HERE. Use the schema builder:
 * cd ltc-provider-app/tools
 * python3 schemaBuilder.py
 */
''')
            f.write('export const schema = ')
            ujson.dump(clientModel, f, indent=2, sort_keys=False)
            f.write(';\n\nexport const schemaSQL = `')
            sqliteModel = convert_sql_types(clientModel)
            db_engine = create_engine('sqlite:///:memory:')
            metadata = MetaData()
            for key in sqliteModel:
                model = sqliteModel[key]
                table = generateTable(key, model, metadata)
                f.write(str(CreateTable(table).compile(db_engine)) + ';')
            f.write('`;\n\n')
            f.write('export const version = \'' + str(uuid4()) + '\';\n')
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        import sys
        print('Failed to save model')
        import traceback
        traceback.print_exc()
        print(sys.exc_info()[0])
def handle(self, options, global_options, *args): from sqlalchemy.schema import CreateTable, CreateIndex engine = get_engine(options, global_options) tables = get_sorted_tables( get_tables(global_options.apps_dir, tables=args, engine_name=options.engine, settings_file=global_options.settings, local_settings_file=global_options.local_settings)) for name, t in tables: if t.__mapping_only__: continue print "%s;" % str( CreateTable(t).compile(dialect=engine.dialect)).rstrip() for x in t.indexes: print "%s;" % CreateIndex(x)
async def set_up():
    """Connect to the configured database, ensure the uuid-ossp extension and
    all tables exist (best effort), then disconnect."""
    import databases
    from sqlalchemy.schema import CreateTable
    from starlette.config import Config

    config = Config('.env')
    DATABASE_URL = config('DATABASE_URL')
    database = databases.Database(DATABASE_URL)
    await database.connect()
    await database.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";')
    for table in tables:
        try:
            await database.execute(str(CreateTable(table)))
        except Exception as e:
            # Most likely the table already exists; report and keep going.
            print(e)
    await database.disconnect()