Example #1
import sqlalchemy
from sqlalchemy import ForeignKeyConstraint, MetaData, Table
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.schema import DropConstraint, DropTable


def _drop_everything(engine: sqlalchemy.engine.base.Engine):
    """(On a live db) drops all foreign key constraints before dropping all tables.
    Workaround for SQLAlchemy not doing DROP ... CASCADE for drop_all()
    (https://github.com/pallets/flask-sqlalchemy/issues/722)
    """

    con = engine.connect()
    trans = con.begin()
    inspector = Inspector.from_engine(engine)

    # We need to re-create a minimal metadata with only the required things to
    # successfully emit drop constraints and tables commands for postgres (based
    # on the actual schema of the running instance)
    meta = MetaData()
    tables = []
    all_fkeys = []

    for table_name in inspector.get_table_names():
        fkeys = []

        for fkey in inspector.get_foreign_keys(table_name):
            if not fkey["name"]:
                continue

            fkeys.append(ForeignKeyConstraint((), (), name=fkey["name"]))

        tables.append(Table(table_name, meta, *fkeys))
        all_fkeys.extend(fkeys)

    for fkey in all_fkeys:
        con.execute(DropConstraint(fkey))

    for table in tables:
        con.execute(DropTable(table))

    trans.commit()
    Base.metadata.drop_all(engine)
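Example #1 works around the missing CASCADE by dropping every foreign key constraint first. If a PostgreSQL-style CASCADE is acceptable, another option is to customize how DropTable compiles so that drop_all() itself emits DROP TABLE ... CASCADE; a minimal sketch using SQLAlchemy's documented @compiles hook:

from sqlalchemy.ext.compiler import compiles
from sqlalchemy.schema import DropTable


@compiles(DropTable, "postgresql")
def _compile_drop_table(element, compiler, **kwargs):
    # Append CASCADE so dependent foreign keys are dropped along with the table.
    return compiler.visit_drop_table(element) + " CASCADE"

With this hook registered, Base.metadata.drop_all(engine) no longer trips over inter-table foreign keys on PostgreSQL.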
Example #2
def ddl_from_json(schema_json):
    """
    This function takes the schema definition in JSON format that's returned by
    the Canvas Data API and returns SQL DDL statements that can be used to create
    all of the tables necessary to hold the archived data.
    """
    metadata = MetaData()
    create_ddl = []
    drop_ddl = []
    for artifact in schema_json:
        table_name = schema_json[artifact]['tableName']
        json_columns = schema_json[artifact]['columns']

        t = Table(table_name, metadata)

        for j_col in json_columns:
            sa_col = _get_column(table_name, j_col)
            if sa_col is not None:
                t.append_column(sa_col)

        create_ddl.append(str(CreateTable(t)))
        drop_ddl.append(str(DropTable(t, if_exists=True)))

    return create_ddl, drop_ddl
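Note that str(CreateTable(t)) and str(DropTable(t)) render generic DDL; to get dialect-specific statements (and IF EXISTS support, which needs a reasonably recent SQLAlchemy), compile the constructs against a concrete dialect. A small sketch with a hypothetical table:

from sqlalchemy import Column, Integer, MetaData, Table, Text
from sqlalchemy.dialects import postgresql
from sqlalchemy.schema import CreateTable, DropTable

metadata = MetaData()
# Hypothetical table used only to illustrate compilation.
requests = Table("requests", metadata,
                 Column("id", Integer, primary_key=True),
                 Column("body", Text))

print(CreateTable(requests).compile(dialect=postgresql.dialect()))
print(DropTable(requests, if_exists=True).compile(dialect=postgresql.dialect()))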
Example #3
    def database_init(self, app, db, data_logger, store_conf):
        _logger = logging.getLogger(__name__)
        # initialize Database configuration
        from sqlalchemy.engine.reflection import Inspector
        from sqlalchemy.schema import (
            DropConstraint,
            DropTable,
            MetaData,
            Table,
            ForeignKeyConstraint,
        )
        con = db.engine.connect()
        trans = con.begin()
        inspector = Inspector.from_engine(db.engine)
        meta = MetaData()
        tables = []
        all_fkeys = []
        for table_name in inspector.get_table_names():
            fkeys = []
            for fkey in inspector.get_foreign_keys(table_name):
                if not fkey["name"]:
                    continue
                fkeys.append(ForeignKeyConstraint((), (), name=fkey["name"]))
            tables.append(Table(table_name, meta, *fkeys))
            all_fkeys.extend(fkeys)
        for fkey in all_fkeys:
            con.execute(DropConstraint(fkey))
        for table in tables:
            con.execute(DropTable(table))
        trans.commit()
        _logger.info("Database dropped")
        # create the data structure from the store plugin config file
        self.init_data_structure(app, db, store_conf)
        _logger.info("Data structure created")
        self.seed_init_data(app, db)
        _logger.info("Initial data seed done")
Example #4
    def db_drop_everything(self, table_list):
        """ From http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything """
        conn = None
        try:
            logger.warning("SIGNAL DROP -> %s" % table_list)
            conn = self.engine.connect()
            transactional = conn.begin()
            inspector = reflection.Inspector.from_engine(self.engine)

            tables = []
            all_foreign_keys = []

            for table_name in inspector.get_table_names():
                if table_name in table_list:
                    fks = []
                    for fk in inspector.get_foreign_keys(table_name):
                        if not fk["name"]:
                            continue
                        fks.append(
                            ForeignKeyConstraint((), (), name=fk["name"]))
                    t = Table(table_name, self.metadata, *fks)
                    tables.append(t)
                    all_foreign_keys.extend(fks)

            for foreignkey in all_foreign_keys:
                conn.execute(DropConstraint(foreignkey))

            for table in tables:
                conn.execute(DropTable(table))

            transactional.commit()
            return True
        except Exception as err:
            logger.error("db_drop_everything [error] -> %s" % err)
            return False
        finally:
            # conn may never have been assigned if engine.connect() itself failed
            if conn is not None:
                conn.close()
Example #5
from sqlalchemy import inspect
from sqlalchemy.engine import Engine
from sqlalchemy.schema import (
    DropConstraint,
    DropTable,
    ForeignKeyConstraint,
    MetaData,
    Table,
)


def _drop_everything(engine: Engine):
    # From http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything

    conn = engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    # inspector = reflection.Inspector.from_engine(engine)
    inspector = inspect(engine)
    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
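On newer SQLAlchemy versions, much of this recipe can be expressed more compactly: reflecting the live schema into a fresh MetaData lets drop_all() sort tables by dependency, and the usual begin/commit dance is handled for transactional-DDL backends. A rough sketch, not a replacement for the constraint-dropping approach above:

from sqlalchemy import MetaData, create_engine


def drop_all_reflected(url: str) -> None:
    # Reflect whatever exists in the database and drop it in dependency order.
    engine = create_engine(url)
    metadata = MetaData()
    metadata.reflect(bind=engine)
    metadata.drop_all(bind=engine)

This handles ordinary foreign keys via drop ordering; mutually dependent tables still need the constraint-dropping (or CASCADE) approach shown above.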
Example #6
async def fin():
    drop_expr = DropTable(sa_table)
    async with database.acquire() as conn:
        await conn.execute(drop_expr)
        await conn.execute('commit;')
Example #7
def drop_tables(tables: Iterable[Table]) -> List[DDLElement]:
    return [DropTable(table) for table in tables]
Example #8
    def delete_table(self):
        logger.info('try to delete table {} in {}'.format(
            self.sql_table_name, self.schema))

        table = self.get_sql_table_object(need_columns=False)
        return self.local_engine.execute(DropTable(table))
Example #9
    async def drop_table(self, table_name, schema=None):
        schema = schema or self.meta.schema
        if self.has_table(table_name, schema):
            async with self.engine.connect() as connection:
                # engine.dialect is an attribute, not a callable
                query = DropTable(
                    Table(table_name, self.meta, schema=schema)
                ).compile(dialect=self.engine.dialect)
                await connection.execute(str(query))
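Example #9 compiles the DDL to a string because its async driver cannot execute SQLAlchemy constructs directly. With SQLAlchemy 1.4+'s own asyncio support, the construct can be executed as-is; a minimal sketch (the URL and table name are placeholders):

from sqlalchemy import MetaData, Table, inspect
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.schema import DropTable


async def drop_table_async(url: str, table_name: str) -> None:
    engine = create_async_engine(url)
    async with engine.begin() as conn:
        # Inspection is synchronous, so run it through run_sync().
        exists = await conn.run_sync(
            lambda sync_conn: inspect(sync_conn).has_table(table_name)
        )
        if exists:
            await conn.execute(DropTable(Table(table_name, MetaData())))
    await engine.dispose()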
Example #10
    def delete(self, symbol):
        table = Table(symbol, MetaData(), autoload_with=self._engine)
        # if_exists is an option of DropTable itself, not of execute()
        self._engine.execute(DropTable(table, if_exists=True))
Example #11
File: conftest.py  Project: tukva/betagr
async def drop_tables():
    async with Connection() as conn:
        await conn.execute(DropTable(Parser.team))
        await conn.execute(DropTable(Parser.real_team))
        await conn.execute(DropTable(Parser.link))
Example #12
async def drop_tables(db_uri: str) -> None:
    pool = await asyncpgsa.create_pool(db_uri)
    async with pool.acquire() as conn:
        for table in models:
            q = DropTable(table)
            await conn.execute(q)
Example #13
File: conftest.py  Project: tukva/betagr
async def drop_tables():
    async with Connection() as conn:
        await conn.execute(DropTable(tb_team))
        await conn.execute(DropTable(tb_real_team))
Example #14
    def __delete_table(self, table_name):
        """
        Delete the table.
        """
        # Constructing DropTable only builds the DDL element; it must be
        # executed to take effect (an `engine` attribute is assumed here).
        self.engine.execute(DropTable(Table(table_name, MetaData())))
Example #15
    def run(self):
        self.parse_options()

        try:
            configfile = self.opt.conf
            if configfile is None:
                # Look for config file in default locations
                for loc in defaults.CONFIG_LOCATIONS:
                    if os.path.exists(loc):
                        config = HaizeaConfig.from_file(loc)
                        break
                else:
                    print >> sys.stdout, "No configuration file specified, and none found at default locations."
                    print >> sys.stdout, "Make sure a config file exists at:\n  -> %s" % "\n  -> ".join(
                        defaults.CONFIG_LOCATIONS)
                    print >> sys.stdout, "Or specify a configuration file with the --conf option."
                    exit(1)
            else:
                config = HaizeaConfig.from_file(configfile)

            ans = input(
                "Are you sure you want to clear all the experiments from the database? Y/N? "
            )

            if ans == 'Y' or ans == 'y':
                from sqlalchemy.engine import reflection
                from sqlalchemy import create_engine
                from sqlalchemy.schema import (
                    MetaData,
                    Table,
                    DropTable,
                    ForeignKeyConstraint,
                    DropConstraint,
                )

                engine = create_engine(
                    'sqlite:///%s' %
                    os.path.expanduser(config.get("datafile")),
                    echo=False)

                conn = engine.connect()

                # the transaction only applies if the DB supports
                # transactional DDL, i.e. Postgresql, MS SQL Server
                trans = conn.begin()

                inspector = reflection.Inspector.from_engine(engine)

                # gather all data first before dropping anything.
                # some DBs lock after things have been dropped in
                # a transaction.

                metadata = MetaData()

                tbs = []
                all_fks = []

                for table_name in inspector.get_table_names():
                    fks = []
                    for fk in inspector.get_foreign_keys(table_name):
                        if not fk['name']:
                            continue
                        fks.append(
                            ForeignKeyConstraint((), (), name=fk['name']))
                    t = Table(table_name, metadata, *fks)
                    tbs.append(t)
                    all_fks.extend(fks)

                for fkc in all_fks:
                    conn.execute(DropConstraint(fkc))

                for table in tbs:
                    conn.execute(DropTable(table))

                trans.commit()

        except ConfigException as msg:
            print("Error in configuration file:", file=sys.stderr)
            print(msg, file=sys.stderr)
            exit(1)
Example #16
# Fragment of the DropEverything recipe; assumes an already-open SQLAlchemy
# connection `conn` on an existing `engine` (e.g. conn = engine.connect()).
trans = conn.begin()

inspector = reflection.Inspector.from_engine(engine)

# gather all data first before dropping anything.
# some DBs lock after things have been dropped in
# a transaction.

metadata = MetaData()

tbs = []
all_fks = []

for table_name in inspector.get_table_names():
    fks = []
    for fk in inspector.get_foreign_keys(table_name):
        if not fk['name']:
            continue
        fks.append(ForeignKeyConstraint((), (), name=fk['name']))
    t = Table(table_name, metadata, *fks)
    tbs.append(t)
    all_fks.extend(fks)

for fkc in all_fks:
    conn.execute(DropConstraint(fkc))

for table in tbs:
    conn.execute(DropTable(table))

trans.commit()
Example #17
async def delete_tables(engine: Engine, tables: list) -> None:
    """Deletes tables included in list."""
    async with engine.acquire() as conn:
        for table in reversed(tables):
            delete_table_stmt = DropTable(table.__table__)
            await conn.execute(delete_table_stmt)
Example #18
    await insert_weather(connection=connection,
                         table_name='weatherdeneme2',
                         table_values=('San Francisco', 43, 57, 0, 0, '1994-11-29'))


loop = asyncio.get_event_loop()
loop.run_until_complete(main())

import pandas as pd
from constants import *

from sqlalchemy import create_engine
from sqlalchemy.schema import DropTable, MetaData, Table

engine = create_engine('postgresql://*****:*****@localhost:5432/acanacar')
# DropTable requires a Table object; presumably the target table is meant to be
# dropped (if it exists) before being re-created below.
engine.execute(DropTable(Table('time_series_5', MetaData()), if_exists=True))
df = pd.read_pickle(websocket_path / Path('outputs/data_sample_5.pickle'))
df.columns = [str.lower(col) for col in df.columns]
df.rename(columns={'datetime': 'datetime_pd', 'mydatetime': 'my_time_pd'}, inplace=True)

df.to_sql('time_series_5', engine)

result_set = engine.execute("SELECT * FROM table_name")
for result in result_set:
    print(result)

import time
current_milli_time = lambda: int(round(time.time() * 1000))


async def insert_ticker(connection, table_name='table_name'):
Example #19
async def drop_tables():
    async with Connection() as conn:
        await conn.execute(DropTable(tb_team))
        await conn.execute(DropTable(tb_real_team))
        await conn.execute(DropTable(tb_link))
        await conn.execute("DROP TYPE status_team;")
Example #20
File: __init__.py  Project: norsig/tracim
def teardown_db():
    """Destroy the database schema."""
    engine = config['tg.app_globals'].sa_engine
    connection = engine.connect()

    # INFO - D.A. - 2014-12-04
    # Recipe taken from bitbucket:
    # https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/DropEverything

    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData()

    tbs = []
    all_fks = []
    views = []

    # INFO - D.A. - 2014-12-04
    # Sequences are hard-coded here because SQLAlchemy does not let us reflect them from the existing schema
    seqs = [
        Sequence('seq__groups__group_id'),
        Sequence('seq__contents__content_id'),
        Sequence('seq__content_revisions__revision_id'),
        Sequence('seq__permissions__permission_id'),
        Sequence('seq__users__user_id'),
        Sequence('seq__workspaces__workspace_id')
    ]

    for view_name in inspector.get_view_names():
        v = Table(view_name, metadata)
        views.append(v)

    for table_name in inspector.get_table_names():

        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    if not config['sqlalchemy.url'].startswith('sqlite'):
        for fkc in all_fks:
            connection.execute(DropConstraint(fkc))

    for view in views:
        drop_statement = 'DROP VIEW {}'.format(view.name)
        # engine.execute(drop_statement)
        connection.execute(drop_statement)

    for table in tbs:
        connection.execute(DropTable(table))

    for sequence in seqs:
        try:
            connection.execute(DropSequence(sequence))
        except Exception as e:
            logger.debug(
                teardown_db,
                'Exception while trying to remove sequence {}'.format(
                    sequence.name))

    transaction.commit()
    connection.close()
    engine.dispose()
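The hard-coded sequence list above exists because the reflection API used here does not expose sequences. On PostgreSQL they can instead be discovered from information_schema; a sketch, assuming the same open `connection` and the default public schema:

from sqlalchemy import text
from sqlalchemy.schema import DropSequence, Sequence

# Discover sequence names instead of hard-coding them (PostgreSQL only).
rows = connection.execute(
    text("SELECT sequence_name FROM information_schema.sequences "
         "WHERE sequence_schema = 'public'")
)
for (seq_name,) in rows:
    connection.execute(DropSequence(Sequence(seq_name)))

Newer SQLAlchemy versions also expose Inspector.get_sequence_names() for the same purpose.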
Example #21
    def drop_user_table(self):
        if self.engine.has_table("users"):
            print("Dropping users table...")
            t = Table("users", self.meta)
            self.connection.execute(DropTable(t))
Example #22
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", Text),
)


async def insert_users(u):
    # Insert some users
    await u.execute(users.insert().values(name="Jeremy Goodwin"))
    await u.execute(users.insert().values(name="Natalie Hurley"))
    await u.execute(users.insert().values(name="Dan Rydell"))
    await u.execute(users.insert().values(name="Casey McCall"))
    await u.execute(users.insert().values(name="Dana Whitaker"))


with sa.connection() as conn:
    newloop = conn.get_loop()  # get the running loop
    result, error = conn.test_connection()
    # Create the table
    result, error = newloop.run_until_complete(conn.execute(
        CreateTable(users)))
    # Insert some users
    newloop.run_until_complete(insert_users(conn))
    result, error = newloop.run_until_complete(
        conn.query(users.select(users.c.name.startswith("D"))))
    print(result)
    # Drop the table
    result, error = newloop.run_until_complete(conn.execute(DropTable(users)))
Example #23
    def drop_tx_table(self):
        if self.engine.has_table("transactions"):
            print("Dropping transaction table...")
            t = Table("transactions", self.meta)
            self.connection.execute(DropTable(t))
Example #24
    def drop_with_cascade(self):
        # Execute against Table objects (not names); a bare DropTable(...) call
        # emits nothing. A real CASCADE needs the @compiles(DropTable) hook
        # shown after Example #1.
        for table in reversed(db.metadata.sorted_tables):
            db.engine.execute(DropTable(table))
Example #25
    def handle_noargs(self, **options):
        verbosity = 1 #int(options.get('verbosity'))
        interactive = options.get('interactive')
        show_traceback = options.get('traceback')

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError as exc:
                # This is slightly hackish. We want to ignore ImportErrors
                # if the "management" module itself is missing -- but we don't
                # want to ignore the exception if the management module exists
                # but raises an ImportError for some reason. The only way we
                # can do this is to check the text of the exception. Note that
                # we're a bit broad in how we check the text, because different
                # Python implementations may not use the same text.
                # CPython uses the text "No module named management"
                # PyPy uses "No module named myproject.myapp.management"
                msg = exc.args[0]
                if not msg.startswith('No module named') or 'management' not in msg:
                    raise

        db = options.get('database')
        orm = ORM.get(db)
        db_info = orm.settings_dict
        is_test_db = db_info.get('TEST', False)
        if not is_test_db:
            print 'Database "%s" cannot be purged because it is not a test ' \
                  'database.\nTo flag this as a test database, set TEST to ' \
                  'True in the database settings.' % db
            sys.exit()

        if interactive:
            confirm = input('\nYou have requested a purge of database '
                '"%s" (%s). This will IRREVERSIBLY DESTROY all data '
                'currently in the database, and DELETE ALL TABLES AND '
                'SCHEMAS. Are you sure you want to do this?\n\n'
                'Type "yes" to continue, or "no" to cancel: '
                % (db, orm.engine.url))
        else:
            confirm = 'yes'

        if confirm == 'yes':
            # get a list of all schemas used by the app
            default_schema = orm.engine.url.database
            app_schemas = set(orm.Base.metadata._schemas)
            app_schemas.add(default_schema)

            url = deepcopy(orm.engine.url)
            url.database = None
            engine = create_engine(url)
            inspector = inspect(engine)

            # get a list of existing schemas
            db_schemas = set(inspector.get_schema_names())

            schemas = app_schemas.intersection(db_schemas)

            app_tables = set()
            for table in orm.Base.metadata.tables.values():
                schema = table.schema or default_schema
                app_tables.add('%s.%s' % (schema, table.name))

            metadata = MetaData()
            db_tables = []
            all_fks = []

            for schema in schemas:
                for table_name in inspector.get_table_names(schema):
                    fullname = '%s.%s' % (schema, table_name)
                    if fullname not in app_tables:
                        continue
                    fks = []
                    for fk in inspector.get_foreign_keys(table_name, schema=schema):
                        if not fk['name']:
                            continue
                        fks.append(ForeignKeyConstraint((),(),name=fk['name']))
                    t = Table(table_name, metadata, *fks, schema=schema)
                    db_tables.append(t)
                    all_fks.extend(fks)

            session = Session(bind=engine)
            for fkc in all_fks:
                session.execute(DropConstraint(fkc))
            for table in db_tables:
                session.execute(DropTable(table))
            for schema in schemas:
                session.execute(DropSchema(schema))
            session.commit()
            session.bind.dispose()

        else:
            self.stdout.write("Purge cancelled.\n")
Example #26
    def delete_table(self, table_name):
        # A bare DropTable(...) call emits nothing; execute it (an engine attribute is assumed).
        self.engine.execute(DropTable(Table(table_name, MetaData())))