def _prep_testing_database(options, file_config):
    from sqlalchemy.testing import config
    from sqlalchemy import schema, inspect

    if options.dropfirst:
        for cfg in config.Config.all_configs():
            e = cfg.db
            inspector = inspect(e)
            try:
                view_names = inspector.get_view_names()
            except NotImplementedError:
                pass
            else:
                for vname in view_names:
                    e.execute(schema._DropView(
                        schema.Table(vname, schema.MetaData())))

            if config.requirements.schemas.enabled_for_config(cfg):
                try:
                    view_names = inspector.get_view_names(
                        schema="test_schema")
                except NotImplementedError:
                    pass
                else:
                    for vname in view_names:
                        e.execute(schema._DropView(
                            schema.Table(vname, schema.MetaData(),
                                         schema="test_schema")))

            for tname in reversed(
                    inspector.get_table_names(order_by="foreign_key")):
                e.execute(schema.DropTable(
                    schema.Table(tname, schema.MetaData())))

            if config.requirements.schemas.enabled_for_config(cfg):
                for tname in reversed(inspector.get_table_names(
                        order_by="foreign_key", schema="test_schema")):
                    e.execute(schema.DropTable(
                        schema.Table(tname, schema.MetaData(),
                                     schema="test_schema")))
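# Hedged usage sketch (not from the original sources; the table name is
# hypothetical): _prep_testing_database above builds throwaway Table objects
# against a fresh MetaData purely so DropTable/_DropView have something to
# render -- no column reflection is needed just to emit a DROP statement.
from sqlalchemy import schema

stub = schema.Table("old_table", schema.MetaData())
print(schema.DropTable(stub))  # -> DROP TABLE old_table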
def __init__(self, **kwargs):
    self._logger = self._get_logger()
    self._format_date = kwargs.get('format_date')
    self._source: SourceSinkDescriptor = SourceSinkDescriptor()
    self._sink: SourceSinkDescriptor = SourceSinkDescriptor()
    self._job_id = None
    self._run_id = None
    self._manifest_name = None
    self._ddl_file = kwargs.get('ddl_file')
    self._env = kwargs.get('connection', 'state_manager')
    self._conn = pf.create(key=kwargs.get('dao'), configuration={
        'connection': kwargs.get('connection', 'state_manager')
    })
    # TODO: pass the table name as an argument
    metadata = schema.MetaData(bind=self._conn.engine)
    metadata.reflect()
    if ProjectConfig.state_manager_table_name() not in metadata.tables.keys():
        with open(self._ddl_file, 'r') as stream:
            ddl = stream.read()
        with self._conn.connection as conn:
            conn.execute(ddl)
        metadata = schema.MetaData(bind=self._conn.engine)
        metadata.reflect()
    self._table: schema.Table = metadata.tables[
        ProjectConfig.state_manager_table_name()]
def _prep_testing_database(options, file_config):
    from alembic.testing import config
    from alembic.testing.exclusions import against
    from sqlalchemy import schema
    from alembic import util

    if util.sqla_08:
        from sqlalchemy import inspect
    else:
        from sqlalchemy.engine.reflection import Inspector
        inspect = Inspector.from_engine

    if options.dropfirst:
        for cfg in config.Config.all_configs():
            e = cfg.db
            inspector = inspect(e)
            try:
                view_names = inspector.get_view_names()
            except NotImplementedError:
                pass
            else:
                for vname in view_names:
                    e.execute(schema._DropView(
                        schema.Table(vname, schema.MetaData())
                    ))

            if config.requirements.schemas.enabled_for_config(cfg):
                try:
                    view_names = inspector.get_view_names(
                        schema="test_schema")
                except NotImplementedError:
                    pass
                else:
                    for vname in view_names:
                        e.execute(schema._DropView(
                            schema.Table(vname, schema.MetaData(),
                                         schema="test_schema")
                        ))

            for tname in reversed(inspector.get_table_names(
                    order_by="foreign_key")):
                e.execute(schema.DropTable(
                    schema.Table(tname, schema.MetaData())
                ))

            if config.requirements.schemas.enabled_for_config(cfg):
                for tname in reversed(inspector.get_table_names(
                        order_by="foreign_key", schema="test_schema")):
                    e.execute(schema.DropTable(
                        schema.Table(tname, schema.MetaData(),
                                     schema="test_schema")
                    ))

            if against(cfg, "postgresql") and util.sqla_100:
                from sqlalchemy.dialects import postgresql
                for enum in inspector.get_enums("*"):
                    e.execute(postgresql.DropEnumType(
                        postgresql.ENUM(
                            name=enum['name'],
                            schema=enum['schema'])))
def _prep_testing_database(options, file_config):
    from sqlalchemy.testing import config, util
    from sqlalchemy.testing.exclusions import against
    from sqlalchemy import schema, inspect

    if options.dropfirst:
        for cfg in config.Config.all_configs():
            e = cfg.db

            # TODO: this has to be part of provision.py in postgresql
            if against(cfg, "postgresql"):
                with e.connect().execution_options(
                        isolation_level="AUTOCOMMIT") as conn:
                    for xid in conn.execute(
                            "select gid from pg_prepared_xacts").scalars():
                        conn.execute("ROLLBACK PREPARED '%s'" % xid)

            inspector = inspect(e)
            try:
                view_names = inspector.get_view_names()
            except NotImplementedError:
                pass
            else:
                for vname in view_names:
                    e.execute(schema._DropView(
                        schema.Table(vname, schema.MetaData())))

            if config.requirements.schemas.enabled_for_config(cfg):
                try:
                    view_names = inspector.get_view_names(
                        schema="test_schema")
                except NotImplementedError:
                    pass
                else:
                    for vname in view_names:
                        e.execute(schema._DropView(
                            schema.Table(vname, schema.MetaData(),
                                         schema="test_schema")))

            util.drop_all_tables(e, inspector)

            if config.requirements.schemas.enabled_for_config(cfg):
                util.drop_all_tables(e, inspector, schema=cfg.test_schema)

            # TODO: this has to be part of provision.py in postgresql
            if against(cfg, "postgresql"):
                from sqlalchemy.dialects import postgresql
                for enum in inspector.get_enums("*"):
                    e.execute(postgresql.DropEnumType(
                        postgresql.ENUM(name=enum["name"],
                                        schema=enum["schema"])))
def _prep_testing_database(options, file_config):
    from sqlalchemy.testing import config, util
    from sqlalchemy.testing.exclusions import against
    from sqlalchemy import schema, inspect

    if options.dropfirst:
        for cfg in config.Config.all_configs():
            e = cfg.db
            inspector = inspect(e)
            try:
                view_names = inspector.get_view_names()
            except NotImplementedError:
                pass
            else:
                for vname in view_names:
                    e.execute(schema._DropView(
                        schema.Table(vname, schema.MetaData())))

            if config.requirements.schemas.enabled_for_config(cfg):
                try:
                    view_names = inspector.get_view_names(
                        schema="test_schema")
                except NotImplementedError:
                    pass
                else:
                    for vname in view_names:
                        e.execute(schema._DropView(
                            schema.Table(vname, schema.MetaData(),
                                         schema="test_schema")))

            util.drop_all_tables(e, inspector)

            if config.requirements.schemas.enabled_for_config(cfg):
                util.drop_all_tables(e, inspector, schema=cfg.test_schema)

            if against(cfg, "postgresql"):
                from sqlalchemy.dialects import postgresql
                for enum in inspector.get_enums("*"):
                    e.execute(postgresql.DropEnumType(
                        postgresql.ENUM(name=enum["name"],
                                        schema=enum["schema"])))
def _compare_tables(conn_table_names, metadata_table_names,
                    object_filters, inspector, metadata, diffs,
                    autogen_context):

    for s, tname in metadata_table_names.difference(conn_table_names):
        name = '%s.%s' % (s, tname) if s else tname
        metadata_table = metadata.tables[sa_schema._get_table_key(tname, s)]
        if _run_filters(metadata_table, tname, "table", False,
                        None, object_filters):
            diffs.append(("add_table", metadata.tables[name]))
            log.info("Detected added table %r", name)
            _compare_indexes(s, tname, object_filters, None,
                             metadata_table, diffs, autogen_context,
                             inspector, set())

    removal_metadata = sa_schema.MetaData()
    for s, tname in conn_table_names.difference(metadata_table_names):
        name = sa_schema._get_table_key(tname, s)
        exists = name in removal_metadata.tables
        t = sa_schema.Table(tname, removal_metadata, schema=s)
        if not exists:
            inspector.reflecttable(t, None)
        if _run_filters(t, tname, "table", True, None, object_filters):
            diffs.append(("remove_table", t))
            log.info("Detected removed table %r", name)

    existing_tables = conn_table_names.intersection(metadata_table_names)

    existing_metadata = sa_schema.MetaData()
    conn_column_info = {}
    for s, tname in existing_tables:
        name = sa_schema._get_table_key(tname, s)
        exists = name in existing_metadata.tables
        t = sa_schema.Table(tname, existing_metadata, schema=s)
        if not exists:
            inspector.reflecttable(t, None)
        conn_column_info[(s, tname)] = t

    for s, tname in sorted(existing_tables):
        name = '%s.%s' % (s, tname) if s else tname
        metadata_table = metadata.tables[name]
        conn_table = existing_metadata.tables[name]
        if _run_filters(metadata_table, tname, "table", False,
                        conn_table, object_filters):
            _compare_columns(s, tname, object_filters,
                             conn_table, metadata_table,
                             diffs, autogen_context, inspector)
            c_uniques = _compare_uniques(s, tname, object_filters,
                                         conn_table, metadata_table,
                                         diffs, autogen_context, inspector)
            _compare_indexes(s, tname, object_filters,
                             conn_table, metadata_table,
                             diffs, autogen_context, inspector,
                             c_uniques)
def _primary_key_constraint(self, name, table_name, cols, schema=None):
    m = sa_schema.MetaData()
    columns = [sa_schema.Column(n, NULLTYPE) for n in cols]
    t1 = sa_schema.Table(table_name, m, *columns, schema=schema)
    p = sa_schema.PrimaryKeyConstraint(*columns, name=name)
    t1.append_constraint(p)
    return p
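# Hedged sketch (hypothetical names, not from the original sources): the same
# throwaway-MetaData pattern used by _primary_key_constraint above, compiled
# to DDL via AddConstraint to show what the returned constraint represents.
from sqlalchemy import schema as sa_schema, types
from sqlalchemy.schema import AddConstraint

m = sa_schema.MetaData()
t = sa_schema.Table("account", m, sa_schema.Column("id", types.Integer))
pk = sa_schema.PrimaryKeyConstraint(t.c.id, name="pk_account")
t.append_constraint(pk)
print(AddConstraint(pk))
# -> ALTER TABLE account ADD CONSTRAINT pk_account PRIMARY KEY (id)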
def set_metadata_by_reflect(self, schema_name):
    '''Given a schema name, reflect the current database and set the metadata.'''
    metadata = schema.MetaData(bind=self.get_engine(), schema=schema_name)
    metadata.reflect(views=True)
    self._set_metadata(metadata)
def down_daily_all(trade_date):
    df = pro.daily(trade_date=trade_date)
    df['code'] = df['ts_code'].str.split('.', expand=True)[0]
    cols = [
        'trade_date', 'ts_code', 'code', 'open', 'high', 'low', 'close',
        'pre_close', 'change', 'pct_chg', 'vol', 'amount'
    ]
    df = df.loc[:, cols]  # .ix was removed from pandas; use .loc
    listToWrite = df.to_dict(orient='records')

    metadata = schema.MetaData(bind=engine, reflect=True)
    table = Table(tableToWriteTo, metadata, autoload=True)

    # Open the session
    Session = sessionmaker(bind=engine)
    session = Session()
    # xd.to_sql('stock_real_data', engine, if_exists='append')
    # xd.to_csv(fss, index=False, encoding='gbk')

    # Insert the dataframe into the database in one bulk
    # (note: runs on the module-level `conn`, not this session)
    conn.execute(table.insert(), listToWrite)

    # Commit the changes
    session.commit()

    # Close the session
    session.close()
    return df
def _foreign_key_constraint(self, name, source, referent,
                            local_cols, remote_cols,
                            onupdate=None, ondelete=None,
                            deferrable=None, source_schema=None,
                            referent_schema=None):
    m = sa_schema.MetaData()
    if source == referent:
        t1_cols = local_cols + remote_cols
    else:
        t1_cols = local_cols
        sa_schema.Table(
            referent, m,
            *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
            schema=referent_schema)

    t1 = sa_schema.Table(
        source, m,
        *[sa_schema.Column(n, NULLTYPE) for n in t1_cols],
        schema=source_schema)

    tname = "%s.%s" % (referent_schema, referent) if referent_schema \
        else referent
    f = sa_schema.ForeignKeyConstraint(
        local_cols,
        ["%s.%s" % (tname, n) for n in remote_cols],
        name=name,
        onupdate=onupdate,
        ondelete=ondelete,
        deferrable=deferrable)
    t1.append_constraint(f)

    return f
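# Hedged illustration (hypothetical table/column names): why the
# source == referent branch above folds remote_cols into the same column
# list -- a self-referential foreign key needs both the local and the
# referenced columns present on the single table being constructed.
from sqlalchemy import schema as sa_schema
from sqlalchemy.types import NullType

m = sa_schema.MetaData()
node = sa_schema.Table(
    "node", m,
    sa_schema.Column("id", NullType()),
    sa_schema.Column("parent_id", NullType()))
fk = sa_schema.ForeignKeyConstraint(
    ["parent_id"], ["node.id"], name="fk_node_parent")
node.append_constraint(fk)
print(sa_schema.AddConstraint(fk))
# -> ALTER TABLE node ADD CONSTRAINT fk_node_parent
#    FOREIGN KEY(parent_id) REFERENCES node (id)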
def create_sa_proxies(self):
    # create the table and mapper
    metadata = schema.MetaData()
    user_table = schema.Table(
        'user', metadata,
        schema.Column('id', types.Integer, primary_key=True),
        schema.Column('first_name', types.Unicode(25)),
        schema.Column('last_name', types.Unicode(25)))

    class User(object):
        pass

    orm.mapper(User, user_table)

    # create the session
    engine = create_engine('sqlite:///:memory:')
    metadata.bind = engine
    metadata.create_all()
    session = orm.sessionmaker(bind=engine)()

    # add some dummy data
    user_table.insert().execute([
        {'first_name': 'Jonathan', 'last_name': 'LaCour'},
        {'first_name': 'Yoann', 'last_name': 'Roman'}
    ])

    # get the SA objects
    self.sa_object = session.query(User).first()
    select = user_table.select()
    self.result_proxy = select.execute()
    self.row_proxy = select.execute().fetchone()
def get_code():
    data = pro.query('stock_basic', exchange='', list_status='L',
                     fields='ts_code,symbol,name,area,industry')
    data.rename(columns={'symbol': 'code'}, inplace=True)
    cols = ['ts_code', 'code', 'name', 'area', 'industry']
    data = data.loc[:, cols]  # .ix was removed from pandas; use .loc
    listToWrite = data.to_dict(orient='records')

    metadata = schema.MetaData(bind=engine, reflect=True)
    table = Table(tableToWriteTo, metadata, autoload=True)

    # Open the session
    Session = sessionmaker(bind=engine)
    session = Session()
    # xd.to_sql('stock_real_data', engine, if_exists='append')
    # xd.to_csv(fss, index=False, encoding='gbk')

    # Insert the dataframe into the database in one bulk
    conn.execute(table.insert(), listToWrite)

    # Commit the changes
    session.commit()

    # Close the session
    session.close()
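# Hedged sketch of the bulk-insert pattern shared by get_code and the
# download helpers around it (the engine URL and table definition here are
# hypothetical; the originals rely on module-level `engine`, `conn` and
# `tableToWriteTo`). Passing a list of dicts to Connection.execute() runs
# an executemany-style bulk INSERT.
from sqlalchemy import create_engine, schema, types

engine = create_engine("sqlite://")
meta = schema.MetaData()
stocks = schema.Table(
    "stock_basic", meta,
    schema.Column("ts_code", types.String(12)),
    schema.Column("name", types.String(32)))
meta.create_all(engine)

records = [{"ts_code": "000001.SZ", "name": "PAB"},
           {"ts_code": "600000.SH", "name": "SPDB"}]
with engine.begin() as conn:
    conn.execute(stocks.insert(), records)  # one round trip, many rows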
def _compare_tables(conn_table_names, metadata_table_names,
                    inspector, metadata, diffs, autogen_context):

    for s, tname in metadata_table_names.difference(conn_table_names):
        name = '%s.%s' % (s, tname) if s else tname
        diffs.append(("add_table", metadata.tables[name]))
        log.info("Detected added table %r", name)

    removal_metadata = sa_schema.MetaData()
    for s, tname in conn_table_names.difference(metadata_table_names):
        name = '%s.%s' % (s, tname) if s else tname
        exists = name in removal_metadata.tables
        t = sa_schema.Table(tname, removal_metadata, schema=s)
        if not exists:
            inspector.reflecttable(t, None)
        diffs.append(("remove_table", t))
        log.info("Detected removed table %r", name)

    existing_tables = conn_table_names.intersection(metadata_table_names)

    conn_column_info = dict(
        ((s, tname),
         dict((rec["name"], rec)
              for rec in inspector.get_columns(tname, schema=s)))
        for s, tname in existing_tables
    )

    for s, tname in sorted(existing_tables):
        name = '%s.%s' % (s, tname) if s else tname
        _compare_columns(s, tname,
                         conn_column_info[(s, tname)],
                         metadata.tables[name],
                         diffs, autogen_context)
def _prep_testing_database(options, file_config):
    from testlib import engines
    from sqlalchemy import schema

    try:
        # also create alt schemas etc. here?
        if options.dropfirst:
            e = engines.utf8_engine()
            existing = e.table_names()
            if existing:
                if not options.quiet:
                    print("Dropping existing tables in database: " + db_url)
                    try:
                        print("Tables: %s" % ', '.join(existing))
                    except Exception:
                        pass
                    print("Abort within 5 seconds...")
                    time.sleep(5)
                md = schema.MetaData(e, reflect=True)
                md.drop_all()
            e.dispose()
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception as e:
        if not options.quiet:
            warnings.warn(
                RuntimeWarning("Error checking for existing tables in "
                               "testing database: %s" % e))
def tearDown(self):
    sqlalchemy.orm.session.Session.close_all()
    meta = sqlalchemy.MetaData()
    meta.bind = self.engine
    meta.reflect(self.engine)

    with self.engine.begin() as conn:
        inspector = reflection.Inspector.from_engine(self.engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk['name']:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint((), (), name=fk['name']))
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        for fkc in all_fks:
            conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))

    sql.cleanup()
    super(SqlMigrateBase, self).tearDown()
def setUp(self):
    super(DropAllObjectsTest, self).setUp()

    self.metadata = metadata = schema.MetaData()
    schema.Table(
        'a', metadata,
        schema.Column('id', types.Integer, primary_key=True),
        mysql_engine='InnoDB')
    schema.Table(
        'b', metadata,
        schema.Column('id', types.Integer, primary_key=True),
        schema.Column('a_id', types.Integer, schema.ForeignKey('a.id')),
        mysql_engine='InnoDB')
    schema.Table(
        'c', metadata,
        schema.Column('id', types.Integer, primary_key=True),
        schema.Column('b_id', types.Integer, schema.ForeignKey('b.id')),
        schema.Column(
            'd_id', types.Integer,
            schema.ForeignKey('d.id', use_alter=True, name='c_d_fk')),
        mysql_engine='InnoDB')
    schema.Table(
        'd', metadata,
        schema.Column('id', types.Integer, primary_key=True),
        schema.Column('c_id', types.Integer, schema.ForeignKey('c.id')),
        mysql_engine='InnoDB')

    metadata.create_all(self.engine, checkfirst=False)
    # will drop nothing if the test worked
    self.addCleanup(metadata.drop_all, self.engine, checkfirst=True)
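# Hedged sketch (hypothetical table names): the use_alter=True flag in the
# setUp above exists to break the c <-> d dependency cycle. Without it,
# create_all() cannot topologically sort the tables and raises
# CircularDependencyError.
from sqlalchemy import create_engine, schema, types
from sqlalchemy.exc import CircularDependencyError

meta = schema.MetaData()
schema.Table('x', meta,
             schema.Column('id', types.Integer, primary_key=True),
             schema.Column('y_id', types.Integer, schema.ForeignKey('y.id')))
schema.Table('y', meta,
             schema.Column('id', types.Integer, primary_key=True),
             schema.Column('x_id', types.Integer, schema.ForeignKey('x.id')))

try:
    meta.create_all(create_engine('sqlite://'))
except CircularDependencyError as err:
    print("cycle detected:", err)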
def _metadata(self):
    kw = {}
    if 'target_metadata' in self.migration_context.opts:
        mt = self.migration_context.opts['target_metadata']
        if hasattr(mt, 'naming_convention'):
            kw['naming_convention'] = mt.naming_convention
    return sa_schema.MetaData(**kw)
def _cleanupDB(self):
    meta = sqlalchemy.MetaData()
    meta.bind = self.engine
    meta.reflect(self.engine)

    with self.engine.begin() as conn:
        inspector = reflection.Inspector.from_engine(self.engine)
        metadata = schema.MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk['name']:
                    continue
                fks.append(
                    schema.ForeignKeyConstraint((), (), name=fk['name']))
            table = schema.Table(table_name, metadata, *fks)
            tbs.append(table)
            all_fks.extend(fks)

        for fkc in all_fks:
            if self.engine.name != 'sqlite':
                conn.execute(schema.DropConstraint(fkc))

        for table in tbs:
            conn.execute(schema.DropTable(table))
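# Hedged sketch of the placeholder-constraint trick used by tearDown and
# _cleanupDB above (constraint/table names here are hypothetical): a
# ForeignKeyConstraint with empty column tuples but a real name is enough
# for DropConstraint to render the ALTER statement, so FKs can be dropped
# before the tables they point at.
from sqlalchemy import schema

meta = schema.MetaData()
fk = schema.ForeignKeyConstraint((), (), name="fk_child_parent")
child = schema.Table("child", meta, fk)
print(schema.DropConstraint(fk))
# -> ALTER TABLE child DROP CONSTRAINT fk_child_parent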
def __init__(self):
    self.session = None
    self.session_offline = None
    self.engine = None
    self.status = False
    self.synchro = False
    self.echo = app.debug
    self.metadata = schema.MetaData()
def _check_constraint(self, name, source, condition, schema=None, **kw):
    t = sa_schema.Table(
        source, sa_schema.MetaData(),
        sa_schema.Column('x', Integer), schema=schema)
    ck = sa_schema.CheckConstraint(condition, name=name, **kw)
    t.append_constraint(ck)
    return ck
def metadata(self):
    kw = {}
    if (self.migration_context is not None
            and "target_metadata" in self.migration_context.opts):
        mt = self.migration_context.opts["target_metadata"]
        if hasattr(mt, "naming_convention"):
            kw["naming_convention"] = mt.naming_convention
    return sa_schema.MetaData(**kw)
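# Hedged sketch of why metadata() above copies naming_convention from the
# target metadata (the convention dict and table are hypothetical):
# constraints created without an explicit name inherit a generated one from
# the owning MetaData, so a fresh MetaData needs the same convention to
# reproduce those names.
from sqlalchemy import MetaData, Table, Column, Integer, UniqueConstraint
from sqlalchemy.schema import CreateTable

convention = {"uq": "uq_%(table_name)s_%(column_0_name)s"}
metadata = MetaData(naming_convention=convention)
t = Table("user", metadata,
          Column("id", Integer, primary_key=True),
          Column("name", Integer),
          UniqueConstraint("name"))
print(CreateTable(t))
# renders ... CONSTRAINT uq_user_name UNIQUE (name)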
def _reflect_tables():
    meta = schema.MetaData()
    schema_name = _get_schema_name()
    _logger.info("Reflecting database schema from postgres schema '{}'."
                 .format(schema_name))
    meta.reflect(bind=_get_engine(), schema=schema_name)
    return meta
def generate_metadata(schema_name=None, bind=None):
    '''Generate metadata for repmgr.'''
    metadata = None
    if bind:
        metadata = schema.MetaData(bind=bind, schema=schema_name)
        metadata.reflect(views=True)
    return metadata
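# Hedged usage sketch for generate_metadata above (assumes a SQLAlchemy 1.x
# install, where MetaData(bind=...) is still supported; the in-memory engine
# and "demo" table are hypothetical).
from sqlalchemy import create_engine, schema, types

engine = create_engine("sqlite://")
setup = schema.MetaData()
schema.Table("demo", setup,
             schema.Column("id", types.Integer, primary_key=True))
setup.create_all(engine)

reflected = generate_metadata(bind=engine)
print(sorted(reflected.tables))  # -> ['demo']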
def _unique_constraint(self, name, source, local_cols, **kw):
    t = schema.Table(
        source, schema.MetaData(),
        *[schema.Column(n, NULLTYPE) for n in local_cols])
    kw['name'] = name
    uq = schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw)
    # TODO: need event tests to ensure the event
    # is fired off here
    t.append_constraint(uq)
    return uq
def _prep_testing_database(options, file_config):
    from sqlalchemy.testing import engines
    from sqlalchemy import schema, inspect

    # also create alt schemas etc. here?
    if options.dropfirst:
        e = engines.utf8_engine()
        inspector = inspect(e)

        try:
            view_names = inspector.get_view_names()
        except NotImplementedError:
            pass
        else:
            for vname in view_names:
                e.execute(schema._DropView(
                    schema.Table(vname, schema.MetaData())))

        try:
            view_names = inspector.get_view_names(schema="test_schema")
        except NotImplementedError:
            pass
        else:
            for vname in view_names:
                e.execute(schema._DropView(
                    schema.Table(vname, schema.MetaData(),
                                 schema="test_schema")))

        for tname in reversed(
                inspector.get_table_names(order_by="foreign_key")):
            e.execute(schema.DropTable(
                schema.Table(tname, schema.MetaData())))

        for tname in reversed(
                inspector.get_table_names(order_by="foreign_key",
                                          schema="test_schema")):
            e.execute(schema.DropTable(
                schema.Table(tname, schema.MetaData(),
                             schema="test_schema")))

        e.dispose()
def setup_engine(self, url):
    self.engine = create_engine(url)  # e.g. 'sqlite:///:memory:', echo=True
    self.metadata = schema.MetaData()
    self.metadata.bind = self.engine
    orm.clear_mappers()
    sm = orm.sessionmaker(bind=self.engine, autoflush=True,
                          autocommit=False, expire_on_commit=True)
    self.session = orm.scoped_session(sm)
def __init__(self):
    self.engine = create_engine(DB_Conn_Str, pool_size=2, max_overflow=0,
                                pool_recycle=3600, pool_pre_ping=True)
    self.metadata = schema.MetaData(bind=self.engine)
    self.scheduled_jobs = schema.Table('scheduled_jobs', self.metadata,
                                       autoload=True)
def setUpAlchemy(self):
    """Init the SQLAlchemy engine."""
    engine = create_engine(self.config['dns'])
    metadata = schema.MetaData()
    Session = scoped_session(sessionmaker(bind=engine))
    Base = declarative_base(metadata=metadata)
    self.alchemy = {'Base': Base, 'Session': Session, 'engine': engine}
def maybe(*args, **kw):
    metadata = schema.MetaData(db)
    context = dict(fn.func_globals)
    context['metadata'] = metadata
    # jython bug #1034
    rebound = types.FunctionType(
        fn.func_code, context, fn.func_name,
        fn.func_defaults, fn.func_closure)
    try:
        return rebound(*args, **kw)
    finally:
        metadata.drop_all()
def down_min_real010(rdat, xcod, xtyp='5', fgIndex=False):
    '''
    Download market index data or simplified stock data; history is
    available back to the market opening in 1994.

    Args:
        rdat: data file directory
        xcod: stock/index code
        finx: stock/index code file
        xtyp (str): k-line data mode; default 'D' (daily).
            D=daily, W=weekly, M=monthly; 5=5min, 15=15min, 30=30min, 60=60min
        fgIndex: index download mode; default False (stock download mode)
    '''
    xd = []
    xtim = arrow.now().format('YYYY-MM-DD')
    fss = rdat + xcod + '.csv'
    if fgIndex:
        fss = rdat + 'inx_' + xcod + '.csv'
    # print('f,', fss)
    print('\n', fss, ",", xtim)
    # -----------
    try:
        xd = ts.get_k_data(xcod, index=fgIndex, start=xtim, end=xtim,
                           ktype=xtyp)
        # -------------
        if len(xd) > 0:
            xd = xd[zsys.ohlcDVLst]
            # print('\nxd5\n', xd.head())
            xd = xd.sort_values(['date'], ascending=True)
            xd['code'] = xcod
            xd = xd[xd.date > xtim]
            listToWrite = xd.to_dict(orient='records')
            metadata = schema.MetaData(bind=engine, reflect=True)
            table = Table(tableToWriteTo, metadata, autoload=True)

            # Open the session
            Session = sessionmaker(bind=engine)
            session = Session()
            # xd.to_sql('stock_real_data', engine, if_exists='append')
            # xd.to_csv(fss, index=False, encoding='gbk')

            # Insert the dataframe into the database in one bulk
            conn.execute(table.insert(), listToWrite)

            # Commit the changes
            session.commit()

            # Close the session
            session.close()
    except IOError:
        # print(IOError)
        pass  # skip on error
    return xd