コード例 #1
0
ファイル: api.py プロジェクト: rkoopmann/google_classroom
 def _drop_table(self):
     """Drop the table backing this class, ignoring a missing table.

     Drops rather than truncates so that schema changes take effect
     without needing a migration.
     """
     try:
         target = self.sql.table(self.table_name)
         self.sql.engine.execute(DropTable(target))
     except NoSuchTableError as error:
         logging.debug(f"{error}: Attempted deletion, but no table exists.")
コード例 #2
0
ファイル: db.py プロジェクト: ivicac/sync-engine
def drop_everything(engine, keep_tables=None, reset_columns=None):
    """Drop all tables in the db unless their name is in `keep_tables`.

    :param engine: SQLAlchemy engine bound to the target database.
    :param keep_tables: iterable of table names to preserve.
    :param reset_columns: dict of table_name -> list of column names whose
        values should be reset to the column's default in the kept tables.
        Each named column must actually have a default (asserted below).

    Fixed vs. the original: the keep-branch no longer builds an unused
    ``Table`` object, and the connection is always closed.
    """
    keep_tables = keep_tables or []
    reset_columns = reset_columns or {}
    conn = engine.connect()
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # Gather all data first before dropping anything: some DBs lock after
    # things have been dropped in a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    try:
        for table_name in inspector.get_table_names():
            if table_name in keep_tables:
                # Reset certain columns in certain tables we're keeping.
                if table_name in reset_columns:
                    column_names = reset_columns[table_name]
                    for c in inspector.get_columns(table_name):
                        if c["name"] in column_names:
                            assert c["default"]

                            # The default value comes from the DB inspector,
                            # not user input, so interpolation is tolerable
                            # here.
                            q = "UPDATE {0} SET {1}={2};".format(
                                table_name, c["name"], c["default"])
                            conn.execute(q)
                continue

            # Only named FK constraints can be dropped explicitly.
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk["name"]:
                    continue
                fks.append(ForeignKeyConstraint((), (), name=fk["name"]))
            tbs.append(Table(table_name, metadata, *fks))
            all_fks.extend(fks)

        # Drop FK constraints first so tables can be dropped in any order.
        for fkc in all_fks:
            conn.execute(DropConstraint(fkc))

        for table in tbs:
            conn.execute(DropTable(table))

        trans.commit()
    finally:
        conn.close()
コード例 #3
0
async def statement(saconnection):
    # Fixture-style helper: create a throwaway "statement" table, hand it to
    # the consumer via `yield_`, then drop it again on resume.
    # NOTE(review): `yield_` presumably comes from the async_generator
    # package and this function carries its decorator elsewhere — confirm.
    statement = sa.Table(
        "statement",
        sa.MetaData(),  # NOQA
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("owner", sa.String(255)),
        sa.Column("balance", sa.Integer),
    )
    await saconnection.execute(CreateTable(statement))
    await yield_(statement)
    await saconnection.execute(DropTable(statement))
コード例 #4
0
ファイル: __init__.py プロジェクト: Nuqlear/vobla
 async def recreate_tables(self):
     """Drop and re-create every table registered on ``metadata``.

     Tables that do not exist yet are silently skipped during the drop
     phase; creation errors propagate.
     """
     async with self.pg.acquire() as conn:
         for tbl in metadata.tables.values():
             try:
                 await conn.execute(DropTable(tbl))
             except psycopg2.ProgrammingError:
                 # Table was never created; nothing to drop.
                 pass
     async with self.pg.acquire() as conn:
         for tbl in metadata.tables.values():
             await conn.execute(CreateTable(tbl))
コード例 #5
0
    def inject_repo_data_into_db(self, models_list):
        """Generate the Package entries that didn't previously exist.

        Contrary to SWHListerBase, we don't actually insert the data in
        database. `create_missing_origins_and_tasks` does it once we have the
        origin and task identifiers.

        :param models_list: list of package dicts, each carrying at least
            'name' and 'version' keys plus the remaining Package fields.
        :return: list of new Package instances added to the session.
        """
        # Map (name, version) -> original input dict so full rows can be
        # rebuilt after the temp-table filtering below.
        by_name_version = {}
        temp_packages = []

        area_id = self.area.id

        for model in models_list:
            name = model['name']
            version = model['version']
            temp_packages.append({
                'area_id': area_id,
                'name': name,
                'version': version,
            })
            by_name_version[name, version] = model

        # Add all the listed packages to a temporary table
        self.db_session.execute(CreateTable(TempPackage.__table__))
        self.db_session.bulk_insert_mappings(TempPackage, temp_packages)

        def exists_tmp_pkg(db_session, model):
            # EXISTS subquery correlating Package and TempPackage on
            # (area_id, name, version).
            return (db_session.query(model).filter(
                Package.area_id == TempPackage.area_id).filter(
                    Package.name == TempPackage.name).filter(
                        Package.version == TempPackage.version).exists())

        # Filter out the packages that already exist in the main Package table
        new_packages = self.db_session\
                           .query(TempPackage)\
                           .options(load_only('name', 'version'))\
                           .filter(~exists_tmp_pkg(self.db_session, Package))\
                           .all()

        # Existing Package rows that appear in this listing too; presumably
        # consumed by a later step — verify against callers.
        self.old_area_packages = self.db_session.query(Package).filter(
            exists_tmp_pkg(self.db_session, TempPackage)).all()

        self.db_session.execute(DropTable(TempPackage.__table__))

        added_packages = []
        for package in new_packages:
            model = by_name_version[package.name, package.version]

            added_packages.append(Package(area=self.area, **model))

        self.db_session.add_all(added_packages)
        return added_packages
コード例 #6
0
ファイル: drop_data.py プロジェクト: alkadis/vcv
def main():
    """Drop all data in the configured database after confirmation.

    Ported from Python 2 (`print` statement, `raw_input`) to Python 3;
    the confirmation variable no longer shadows the `input` builtin.
    """
    parser = create_parser(description=__doc__, use_instance=False)
    parser.add_argument('-f',
                        dest='force',
                        default=False,
                        action='store_true',
                        help="force deletion without asking for confirmation")
    args = parser.parse_args()

    if not args.force:
        answer = input('Delete all data? No backup will be done! '
                       'If so type "yes": ')
        if answer != 'yes':
            print('Answer not "yes", but: "%s"\nAborting.' % answer)
            exit(1)

    config = config_from_args(args)
    engine = get_engine(config, echo=True)
    conn = engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        # Only named FK constraints can be dropped explicitly.
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    # Drop FK constraints first so table drop order does not matter.
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
コード例 #7
0
async def drop_tables(conn: SAConnection) -> None:
    """Drop all known tables (reverse creation order) and enum types,
    ignoring objects that do not exist."""
    for tbl in reversed(tables):
        try:
            await conn.execute(DropTable(tbl))
        except psycopg2.ProgrammingError:
            pass  # table was never created

    for enum_type in enums:
        try:
            await conn.execute(DropEnumType(enum_type))
        except psycopg2.ProgrammingError:
            pass  # type was never created
コード例 #8
0
ファイル: TestKey.py プロジェクト: pSCANNER/USC-source-code
 def make_key_table(self, measure, source_schema):
     # Reflect the source key table for *measure*, build a same-shaped table
     # in the key schema, and print DROP / CREATE / INSERT..SELECT DDL.
     stable = Table(self.key_table_name(measure), self.meta,
                    schema=source_schema, autoload=True)
     dtable = Table(stable.name, self.meta, schema=self.key_schema)
     cnames=[]
     for c in stable.columns:
         # Copy only name and type; constraints/defaults are not carried over.
         dtable.append_column(Column(c.name, c.type))
         cnames.append(c.name)
     # NOTE(review): `autoload=True` and DDL `bind=` were removed in
     # SQLAlchemy 2.0 — this assumes a 1.x engine; confirm before upgrading.
     print(self.terminated_statement(str(DropTable(dtable, bind=self.engine))))
     print(self.terminated_statement(str(CreateTable(dtable, bind=self.engine))))
     query = select(stable.c)
     ins = dtable.insert().from_select(cnames, query)
     print(self.terminated_statement(sql_to_string(ins)))
コード例 #9
0
 def create_or_alter_table(self,
                           table: Table,
                           with_index: bool = False) -> List[DDLElement]:
     """Build the DDL needed to bring *table* in line with the vault action.

     Returns a DROP statement for ALTER/DROP/NONE, followed by CREATE
     statements for ALTER/CREATE/NONE.
     """
     # "NONE" is included because "action" doesn't yet pick up whether
     # satellites have changed, but if they have, some tables still need
     # to be recreated.
     statements = []
     if self.action in (VaultAction.ALTER, VaultAction.DROP,
                        VaultAction.NONE):
         statements.append(DropTable(table))
     if self.action in (VaultAction.ALTER, VaultAction.CREATE,
                        VaultAction.NONE):
         statements.extend(BaseModel.create_table(table, with_index))
     return statements
コード例 #10
0
 def cleanup_tables(cls, engine):
     """Reflect and drop every table in the database — not just those
     known to the current code version / models."""
     metadata = MetaData()
     inspector = inspect(engine)
     with engine.connect() as conn:
         trans = conn.begin()
         reflected = [Table(name, metadata)
                      for name in inspector.get_table_names()]
         for tbl in reflected:
             conn.execute(DropTable(tbl))
         trans.commit()
コード例 #11
0
ファイル: db.py プロジェクト: MeverikR/portal
async def drop_table(pg, table):
    """Drop the table whose name matches *table*.

    :param pg: engine/pool exposing an async ``acquire`` context manager.
    :param table: table name (str), resolved against the module-level
        ``tables`` list.
    :return: True on success, False if the drop raised a ProgrammingError
        (e.g. the table was already created/dropped).
    """
    # Resolve the name to the actual Table object; stop at the first match.
    # If nothing matches, `table` stays a string and DropTable() below will
    # fail loudly rather than drop the wrong thing.
    for candidate in tables:
        if candidate.name == table:
            table = candidate
            break
    async with pg.acquire() as conn:
        try:
            drop_expr = DropTable(table)
            await conn.execute(drop_expr)
        except psycopg2.ProgrammingError as pe:
            # Raised e.g. when the table does not exist.
            print(str(pe))
            return False
        return True
コード例 #12
0
ファイル: conftest.py プロジェクト: tukva/sanic-docker
async def drop_tables():
    """Drop all SSO tables, in the original fixed order."""
    doomed = (
        SSOModels.group_permission,
        SSOModels.user_group,
        SSOModels.user,
        SSOModels.group,
        SSOModels.session,
        SSOModels.permission,
    )
    async with Connection() as conn:
        for tbl in doomed:
            await conn.execute(DropTable(tbl))
コード例 #13
0
ファイル: test_sa_types.py プロジェクト: isabella232/aiopg
 def go(**kwargs):
     # Build an engine and prepare the test table, preferring the `tbl`
     # definition and falling back to `tbl2` when creating `tbl` fails with
     # ProgrammingError (presumably the hstore extension is missing, given
     # the `has_hstore` flag — confirm against the table definitions).
     engine = yield from make_engine(**kwargs)
     with (yield from engine) as conn:
         # Best-effort cleanup of both candidate tables.
         try:
             yield from conn.execute(DropTable(tbl))
         except psycopg2.ProgrammingError:
             pass
         try:
             yield from conn.execute(DropTable(tbl2))
         except psycopg2.ProgrammingError:
             pass
         # Recreate the enum type before creating a table.
         yield from conn.execute("DROP TYPE IF EXISTS simple_enum CASCADE;")
         yield from conn.execute("""CREATE TYPE simple_enum AS ENUM
                                    ('first', 'second');""")
         try:
             yield from conn.execute(CreateTable(tbl))
             ret_tbl = tbl
             has_hstore = True
         except psycopg2.ProgrammingError:
             # Fall back to the non-hstore table.
             yield from conn.execute(CreateTable(tbl2))
             ret_tbl = tbl2
             has_hstore = False
     return engine, ret_tbl, has_hstore
コード例 #14
0
async def preapre_tables(pg):
    """(Re)create the question/choice tables: drop any existing copies in
    reverse order, then create fresh ones.

    (The typo in the function name is kept — it is the public interface.)
    """
    wanted = [db.question, db.choice]
    async with pg.acquire() as conn:
        for tbl in reversed(wanted):
            try:
                await conn.execute(DropTable(tbl))
            except psycopg2.ProgrammingError:
                pass  # table didn't exist yet

    async with pg.acquire() as conn:
        for tbl in wanted:
            await conn.execute(CreateTable(tbl))
コード例 #15
0
async def _drop_tables(engine, tables):
    """Drop each table in *tables* (reverse order), ignoring missing ones.

    :param Engine engine: engine of database
    :param list tables: list of tables to drop
    """
    async with engine.acquire() as connection:
        for tbl in reversed(tables):
            try:
                await connection.execute(DropTable(tbl))
            except psycopg2.ProgrammingError:
                # Table does not exist — nothing to do.
                pass
コード例 #16
0
def db_drop_all(db):
    """Destroy every table, the alembic bookkeeping, and the custom types.

    Based on http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    """
    conn = db.engine.connect()

    # Transactional DDL only takes effect on engines that support it
    # (PostgreSQL, MS SQL Server).
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(db.engine)

    # Collect table and FK info up front; some databases lock once drops
    # start inside a transaction.
    metadata = MetaData()

    pending_tables = []
    pending_fks = []

    for table_name in inspector.get_table_names():
        # Only named FK constraints can be addressed by DROP CONSTRAINT.
        named_fks = [ForeignKeyConstraint((), (), name=fk['name'])
                     for fk in inspector.get_foreign_keys(table_name)
                     if fk['name']]
        pending_tables.append(Table(table_name, metadata, *named_fks))
        pending_fks.extend(named_fks)

    # Constraints first, then the tables themselves.
    for constraint in pending_fks:
        conn.execute(DropConstraint(constraint))

    for tbl in pending_tables:
        conn.execute(DropTable(tbl))

    trans.commit()

    db.engine.execute("DROP TABLE IF EXISTS alembic_version CASCADE")
    db.engine.execute("DROP SEQUENCE IF EXISTS requests_id_seq CASCADE")

    # Custom enum types, dropped with a single DROP TYPE statement.
    custom_types = [
        'assignee_type', 'frequencytypesenum', 'requeststatusenum',
        'schedulestatusenum', 'hotdeskrequeststatusenum', 'frequencyenum',
        'statusenum', 'parenttype'
    ]

    joined_types = ','.join(custom_types)
    sql = f'DROP TYPE IF EXISTS {joined_types} CASCADE'
    db.engine.execute(sql)
コード例 #17
0
def drop_db():
    """Drop every table in the database referenced by ENGINE_STRING."""
    from sqlalchemy.engine import reflection
    from sqlalchemy import create_engine
    from sqlalchemy.schema import (
        MetaData,
        Table,
        DropTable,
        ForeignKeyConstraint,
        DropConstraint,
        )

    engine = create_engine(ENGINE_STRING)
    conn = engine.connect()

    # Transactional DDL only applies on engines that support it
    # (PostgreSQL, MS SQL Server).
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # Collect everything before dropping anything; some DBs lock once
    # drops begin inside a transaction.
    metadata = MetaData()

    doomed_tables = []
    doomed_fks = []

    for table_name in inspector.get_table_names():
        # Only named FK constraints can be dropped explicitly.
        named = [ForeignKeyConstraint((), (), name=fk['name'])
                 for fk in inspector.get_foreign_keys(table_name)
                 if fk['name']]
        doomed_tables.append(Table(table_name, metadata, *named))
        doomed_fks.extend(named)

    for fkc in doomed_fks:
        conn.execute(DropConstraint(fkc))

    for tbl in doomed_tables:
        conn.execute(DropTable(tbl))

    trans.commit()
コード例 #18
0
def table_ddl(tables, engine, drop=False):
    """Render CREATE (or DROP, when *drop* is true) statements for *tables*.

    Each statement is compiled against *engine*'s dialect and followed by
    a ';' + blank-line separator entry.
    """
    rendered = []
    for tbl in tables:
        ddl = DropTable(tbl) if drop else CreateTable(tbl)
        rendered.append(str(ddl.compile(dialect=engine.dialect)).strip())
        rendered.append(';\n\n')
    return rendered
コード例 #19
0
async def test_create_table(sa_connect):
    # Drop the pre-existing table; DDL statements return a closed result.
    conn = await sa_connect()
    res = await conn.execute(DropTable(tbl))
    with pytest.raises(sa.ResourceClosedError):
        await res.fetchmany()

    # The table is gone, so selecting from it must fail.
    with pytest.raises(aiomysql.ProgrammingError):
        await conn.execute("SELECT * FROM sa_tbl")

    # Recreate it; the CREATE result is closed as well.
    res = await conn.execute(CreateTable(tbl))
    with pytest.raises(sa.ResourceClosedError):
        await res.fetchmany()

    # A freshly created table starts out empty.
    res = await conn.execute("SELECT * FROM sa_tbl")
    assert 0 == len(await res.fetchall())
コード例 #20
0
        async def go():
            # Drop the pre-existing table; DDL results are closed.
            conn = await self.connect()
            res = await conn.execute(DropTable(tbl))
            with self.assertRaises(sa.ResourceClosedError):
                await res.fetchmany()

            # The table is gone, so selecting from it must fail.
            with self.assertRaises(aiomysql.ProgrammingError):
                await conn.execute("SELECT * FROM sa_tbl")

            # Recreate it; the CREATE result is closed as well.
            res = await conn.execute(CreateTable(tbl))
            with self.assertRaises(sa.ResourceClosedError):
                await res.fetchmany()

            # A freshly created table starts out empty.
            res = await conn.execute("SELECT * FROM sa_tbl")
            self.assertEqual(0, len(await res.fetchall()))
コード例 #21
0
ファイル: test_sa_connection.py プロジェクト: free-free/aiopg
def test_create_table(connect):
    # Drop the pre-existing table; DDL statements return a closed result.
    conn = yield from connect()
    res = yield from conn.execute(DropTable(tbl))
    with pytest.raises(sa.ResourceClosedError):
        yield from res.fetchmany()

    # The table is gone, so selecting from it must fail.
    with pytest.raises(psycopg2.ProgrammingError):
        yield from conn.execute("SELECT * FROM sa_tbl")

    # Recreate it; the CREATE result is closed as well.
    res = yield from conn.execute(CreateTable(tbl))
    with pytest.raises(sa.ResourceClosedError):
        yield from res.fetchmany()

    # A freshly created table starts out empty.
    res = yield from conn.execute("SELECT * FROM sa_tbl")
    assert 0 == len(list(res))
コード例 #22
0
        def go():
            # Drop the pre-existing table; DDL results are closed.
            conn = yield from self.connect()
            res = yield from conn.execute(DropTable(tbl))
            with self.assertRaises(sa.ResourceClosedError):
                yield from res.fetchmany()

            # The table is gone, so selecting from it must fail.
            with self.assertRaises(psycopg2.ProgrammingError):
                yield from conn.execute("SELECT * FROM sa_tbl")

            # Recreate it; the CREATE result is closed as well.
            res = yield from conn.execute(CreateTable(tbl))
            with self.assertRaises(sa.ResourceClosedError):
                yield from res.fetchmany()

            # A freshly created table starts out empty.
            res = yield from conn.execute("SELECT * FROM sa_tbl")
            self.assertEqual(0, len(list(res)))
コード例 #23
0
ファイル: dump.py プロジェクト: iynaix/pathofexile
def destroy_database(engine):
    """
    completely destroys the database, copied from

    http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything

    :param engine: SQLAlchemy engine bound to the database to wipe.
    """
    from sqlalchemy.engine import reflection
    from sqlalchemy.schema import (
        MetaData,
        Table,
        DropTable,
        ForeignKeyConstraint,
        DropConstraint,
    )

    conn = engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        # Only named FK constraints can be addressed by DROP CONSTRAINT.
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    # Drop FK constraints first so table drop order does not matter.
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
コード例 #24
0
ファイル: db.py プロジェクト: kids-first/kf-model-omop
def drop_tables(config_name=None):
    """
    Drop all tables despite existing constraints

    Source https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/DropEverything # noqa E501

    :param config_name: a dict key which specifies which Config class to select
    in config.config dict. The Config class encapsulates all db parameters such
    as user, pw, host, port, and name of the db. See config.py for more info.
    """
    config = _select_config(config_name)

    engine = create_engine(config.SQLALCHEMY_DATABASE_URI)

    conn = engine.connect()

    # Transactional DDL only takes effect on engines that support it
    # (PostgreSQL, MS SQL Server).
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # Collect everything before dropping anything; some DBs lock once
    # drops begin inside a transaction.
    metadata = MetaData()

    doomed_tables = []
    doomed_fks = []

    for table_name in inspector.get_table_names():
        # Only named FK constraints can be dropped explicitly.
        named = [ForeignKeyConstraint((), (), name=fk['name'])
                 for fk in inspector.get_foreign_keys(table_name)
                 if fk['name']]
        doomed_tables.append(Table(table_name, metadata, *named))
        doomed_fks.extend(named)

    # Constraints first, then the tables themselves.
    for fkc in doomed_fks:
        conn.execute(DropConstraint(fkc))

    for tbl in doomed_tables:
        conn.execute(DropTable(tbl))

    trans.commit()
コード例 #25
0
def test_create_table(connect):
    # Drop the pre-existing table; DDL statements return a closed result.
    conn = yield from connect()
    res = yield from conn.execute(DropTable(tbl))
    with pytest.raises(sa.ResourceClosedError):
        yield from res.fetchmany()

    # The table is gone, so selecting from it must fail.
    with pytest.raises(aiosqlite3.OperationalError):
        yield from conn.execute("SELECT * FROM sa_tbl")

    # Recreate it; the CREATE result is closed as well.
    res = yield from conn.execute(CreateTable(tbl))
    with pytest.raises(sa.ResourceClosedError):
        yield from res.fetchmany()

    # A freshly created table starts out empty.
    res = yield from conn.execute("SELECT * FROM sa_tbl")
    data = yield from async_res_list(res)
    assert 0 == len(data)
コード例 #26
0
def drop_all(engine):
    """Drop the fixture tables in reverse name order.

    This is a special drop-all hardcoded to the workings of fixture.py,
    since Akiban won't let us drop constraints separately.
    """
    from sqlalchemy.engine import reflection
    from sqlalchemy.schema import DropTable
    from sqlalchemy.sql import table

    inspector = reflection.Inspector.from_engine(engine)

    fixture_names = {name for name in inspector.get_table_names()
                     if re.match(r'^[abcdefghijk]\d?$', name)}

    for tname in sorted(fixture_names, reverse=True):
        engine.execute(DropTable(table(tname)))
コード例 #27
0
async def delete_tables(pg, tables, verbose=True):
    """
    Delete tables from DB before creating new version
    :param pg: connect to DB engine(PostgreSQL)
    :param tables: tables from models.py
    :param verbose: set logs
    :return: None
    """
    async with pg.acquire() as conn:
        for tbl in reversed(tables):
            try:
                await conn.execute(DropTable(tbl))
            except psycopg2.ProgrammingError as e:
                logger.error('DB_DELETE: %s' % e)
            else:
                if verbose:
                    logger.debug('DB_DELETE: %s' % tbl)
コード例 #28
0
async def test_fetch_content(test_input, expected_entries, queue_factory,
                             loop):
    # End-to-end: collect links into the queue, fetch their content into the
    # HeadHunter_db table, then compare the stored row count to the expected
    # number of entries.
    queue = queue_factory(loop)
    limit = 2
    test_method = HeadHunter(test_input)
    mytask_1 = asyncio.create_task(test_method.get_links(
        queue, limit))  # run until all tasks have completed
    await mytask_1
    mytask_2 = asyncio.create_task(test_method.fetch_content(
        queue))  # run until all tasks have completed
    await mytask_2
    async with engine.connect() as conn:
        data_object = await conn.execute(HeadHunter_db.select())
        current_data = await data_object.fetchall()
    actual_entries = len(current_data)
    # NOTE(review): awaiting engine.execute() directly assumes an
    # async-capable engine wrapper — confirm against the module's engine.
    await engine.execute(DropTable(HeadHunter_db))
    assert expected_entries == actual_entries
コード例 #29
0
 def connect(self, **kwargs):
     # Create an aiopg.sa engine for the test database, reset the `tbl`
     # table and the `simple_enum` type, and return the engine.
     engine = yield from sa.create_engine(database='aiopg',
                                          user='******',
                                          password='******',
                                          host='127.0.0.1',
                                          loop=self.loop,
                                          **kwargs)
     with (yield from engine) as conn:
         # Best-effort drop: the table may not exist on a fresh database.
         try:
             yield from conn.execute(DropTable(tbl))
         except psycopg2.ProgrammingError:
             pass
         # Recreate the enum type before the table (presumably `tbl` uses
         # it — confirm against the table definition).
         yield from conn.execute("DROP TYPE IF EXISTS simple_enum;")
         yield from conn.execute("""CREATE TYPE simple_enum AS ENUM
                                    ('first', 'second');""")
         yield from conn.execute(CreateTable(tbl))
     return engine
コード例 #30
0
def drop_database_tables(dburi):
    """Drop all tables in the database at *dburi*.

    First tries the reflection-based "drop everything" recipe (named FK
    constraints first, then tables).  If that fails for any reason, falls
    back to `DROP TABLE IF EXISTS` over a hardcoded list of known table
    names, retrying those whose first drop hit an integrity error.

    Fixed vs. the original: Python 2-only `except Exception, e` syntax
    replaced with the Python 3 form (the bound exception was unused).
    """
    engine = create_engine(dburi, echo=True)
    conn = engine.connect()
    try:
        from sqlalchemy.schema import (DropTable, Table, ForeignKeyConstraint, DropConstraint, MetaData)
        from sqlalchemy.engine import reflection
        trans = conn.begin()
        inspector = reflection.Inspector.from_engine(engine)
        metadata = MetaData()
        tbs = []
        all_fks = []
        for table_name in inspector.get_table_names():
            # Only named FK constraints can be dropped explicitly.
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                if not fk['name']:
                    continue
                fks.append(
                    ForeignKeyConstraint((), (), name=fk['name'])
                    )
            t = Table(table_name, metadata, *fks)
            tbs.append(t)
            all_fks.extend(fks)

        for fkc in all_fks:
            conn.execute(DropConstraint(fkc))

        for table in tbs:
            conn.execute(DropTable(table))
        trans.commit()
    except Exception:
        # Deliberate best-effort fallback: drop the known tables by name.
        tables_list = [
            'config', 'user', 'deletion_queue', 'group_membership',
            'group_permission', 'groups', 'permission', 'file',
            'hidden_share', 'private_group_share', 'private_share',
            'private_attribute_share', 'attribute', 'public_share',
            'upload_ticket', 'user_permission', 'audit_log', 'cli_key',
            'message', 'message_recipient', 'session', 'user_permissions',
            'role_membership', 'role_permissions', 'permissions',
            'group_permissions', 'hidden_shares', 'messages',
            'message_shares', 'user_shares', 'group_shares',
            'public_share_files', 'public_shares', 'attribute_shares',
            'upload_requests', 'attributes', 'audit_logs', 'files',
            'users', 'roles']
        redelete_tables = []
        for table in tables_list:
            try:
                conn.execute("""DROP TABLE IF EXISTS %s""" % table)
            except sqlalchemy.exc.IntegrityError:
                redelete_tables.append(table)
        # Second pass for tables whose drop failed on a constraint.
        for table in redelete_tables:
            conn.execute("""DROP TABLE IF EXISTS %s""" % table)
コード例 #31
0
ファイル: database.py プロジェクト: EnTeQuAk/inyoka-legacy
 def __init__(self, *args, **kwargs):
     # Extend DropTable with an optional `cascade` flag; it is popped here
     # so it is not forwarded to the base constructor.  Presumably a
     # companion @compiles hook appends CASCADE when the flag is set —
     # confirm elsewhere in the module.
     self.cascade = kwargs.pop('cascade', False)
     DropTable.__init__(self, *args, **kwargs)