Example no. 1
def clear_database(
    engine: Union[Engine, Connection], schemas: Iterable[str] = ()) -> None:
    """
    Clear any tables from an existing database.

    For SQLite engines, the target database file will be deleted and a new one created in its
    place.

    :param engine: the engine or connection to use
    :param schemas: full list of schema names to expect (ignored for SQLite)

    """
    assert check_argument_types()
    if engine.dialect.name == 'sqlite':
        # SQLite does not support dropping constraints and it's faster to just delete the file
        if engine.url.database not in (None, ':memory:') and os.path.isfile(
                engine.url.database):
            os.remove(engine.url.database)
    else:
        metadatas = []
        for schema in (None, ) + tuple(schemas):
            # Reflect the schema to get the list of the tables, views and constraints
            metadata = MetaData()
            metadata.reflect(engine, schema=schema, views=True)
            metadatas.append(metadata)

        for metadata in metadatas:
            metadata.drop_all(engine, checkfirst=False)
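A minimal usage sketch for the helper above, assuming clear_database is importable in the current scope; the connection URL and the extra schema name are placeholders, not values taken from the example:

from sqlalchemy import create_engine

# Placeholder URL and schema name, purely for illustration.
engine = create_engine("postgresql://user:secret@localhost/testdb")

# Drop everything in the default schema plus the extra "altschema".
clear_database(engine, schemas=["altschema"])
engine.dispose()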
def connection(sync_engine):
    with sync_engine.connect() as conn:
        metadata = MetaData()
        Table("table", metadata, Column("column1", Integer, primary_key=True))
        Table("table2", metadata, Column("fk_column", ForeignKey("table.column1")))
        if conn.dialect.name != "sqlite":
            conn.execute(CreateSchema("altschema"))
            Table("table3", metadata, Column("fk_column", Integer), schema="altschema")

        metadata.create_all(conn)

        yield conn

        if conn.dialect.name != "sqlite":
            metadata.drop_all(conn)
            conn.execute(DropSchema("altschema"))
def clear_database(
    engine: Engine | Connection, schemas: Iterable[str] = ()) -> None:
    """
    Clear any tables from an existing database using a synchronous connection/engine.

    :param engine: the engine or connection to use
    :param schemas: full list of schema names to expect (ignored for SQLite)

    """
    metadatas = []
    all_schemas: tuple[str | None, ...] = (None, )
    all_schemas += tuple(schemas)
    for schema in all_schemas:
        # Reflect the schema to get the list of the tables, views and constraints
        metadata = MetaData()
        metadata.reflect(engine, schema=schema, views=True)
        metadatas.append(metadata)

    for metadata in metadatas:
        metadata.drop_all(engine, checkfirst=False)
Example no. 4
class MigrateManager(object):
    def __init__(self, from_db_uri, to_db_uri, migrate_tables, **kwargs):
        self.migrate_tables = migrate_tables

        self.from_db = create_engine(from_db_uri)
        self.from_metadata = MetaData(bind=self.from_db,
                                      reflect=True,
                                      schema=kwargs.get('from_schema', None))
        self.from_session = sessionmaker(bind=self.from_db)()

        self.to_db = create_engine(to_db_uri)
        self.to_metadata = MetaData(bind=self.to_db,
                                    schema=kwargs.get('to_schema', None))
        self.to_session = sessionmaker(bind=self.to_db)()

    def init_to_metadata(self):
        tables = OrderedDict()
        for table_name in self.migrate_tables:
            table = self.from_metadata.tables[table_name]
            tables[table_name] = table
        self.to_metadata.tables = tables

    def drop_table(self):
        self.init_to_metadata()
        self.to_metadata.drop_all(bind=self.to_db)

    def create_table(self):
        self.init_to_metadata()
        print(self.to_metadata.create_all(bind=self.to_db))

    def insert_data(self):
        for table_name in self.migrate_tables:
            table = self.to_metadata.tables[table_name]
            results = self.from_session.query(table).all()
            table.metadata = self.to_metadata
            table.schema = self.to_metadata.schema
            print(results)
            for result in results:
                print(self.to_db.execute(table.insert().values(result)))
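A hypothetical usage sketch for the class above. The URIs, table names and target schema are placeholders, and the constructor relies on the legacy MetaData(bind=..., reflect=True) arguments, so it presumably targets an older SQLAlchemy release:

# All connection URIs, table names and the target schema below are made up.
manager = MigrateManager(
    "postgresql://user:secret@old-host/source_db",
    "postgresql://user:secret@new-host/target_db",
    ["users", "orders"],
    to_schema="migrated",
)
manager.create_table()  # create the selected tables on the target database
manager.insert_data()   # copy the rows, table by table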
def connect_test_database(url: Union[str, URL], **engine_kwargs) -> Connection:
    """
    Connect to the given database and drop any existing tables in it.

    For SQLite URLs pointing to a file, the target database file will be deleted and a new one
    created in its place.

    :param url: connection URL for the database
    :param engine_kwargs: additional keyword arguments passed to
        :meth:`asphalt.sqlalchemy.component.SQLAlchemyComponent.create_engine`
    :return: a connection object

    """
    assert check_argument_types()
    _context_attr, engine = SQLAlchemyComponent.configure_engine(url=url, **engine_kwargs)

    if engine.dialect.name == 'sqlite':
        # SQLite does not support dropping constraints and it's faster to just delete the file
        if engine.url.database not in (None, ':memory:') and os.path.isfile(engine.url.database):
            os.remove(engine.url.database)

        connection = engine.connect()
    else:
        # Reflect the schema to get the list of the tables and constraints left over from the
        # previous run
        connection = engine.connect()
        metadata = MetaData(connection, reflect=True)

        # Drop all the foreign key constraints so we can drop the tables in any order
        for table in metadata.tables.values():
            for fk in table.foreign_keys:
                connection.execute(DropConstraint(fk.constraint))

        # Drop the tables
        metadata.drop_all()

    return connection
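A short usage sketch, assuming the function above is importable; the SQLite path is a placeholder:

# Obtain a connection to a freshly cleared test database (placeholder path).
connection = connect_test_database("sqlite:////tmp/asphalt-test.db")
try:
    pass  # create tables and run test code against `connection` here
finally:
    connection.close()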
Example no. 6
def engine(request, tmpdir_factory):
    engine = None
    if request.param == 'sqlite-file':
        tmpdir = tmpdir_factory.mktemp('asphalt-sqlalchemy')
        db_path = str(tmpdir.join('test.db'))
        engine = create_engine('sqlite:///' + db_path)
    elif request.param == 'sqlite-memory':
        engine = create_engine('sqlite:///:memory:')
    elif request.param == 'mysql':
        url = request.getfixturevalue('mysql_url')
        engine = create_engine(url)
    elif request.param == 'postgresql':
        url = request.getfixturevalue('postgresql_url')
        engine = create_engine(url)

    if engine.dialect.name != 'sqlite':
        engine.execute(CreateSchema('altschema'))

    if request.param != 'sqlite-memory':
        metadata = MetaData()
        Table('table', metadata, Column('column1', Integer, primary_key=True))
        Table('table2', metadata,
              Column('fk_column', ForeignKey('table.column1')))
        if engine.dialect.name != 'sqlite':
            Table('table3',
                  metadata,
                  Column('fk_column', Integer),
                  schema='altschema')

        metadata.create_all(engine)

    yield engine

    if engine.dialect.name != 'sqlite':
        metadata.drop_all(engine)
        engine.execute(DropSchema('altschema'))
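The fixture above reads request.param, so it is presumably declared with a parametrized @pytest.fixture; the decorator is not shown in the example, so the parameter list below is an assumption inferred from the branches of the if chain:

import pytest

# Assumed parametrization matching the branches handled by the fixture body.
@pytest.fixture(params=['sqlite-file', 'sqlite-memory', 'mysql', 'postgresql'])
def engine(request, tmpdir_factory):
    ...  # body as shown above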
Example no. 7
def drop_all_tables(settings):
    engine = engine_from_config(settings, 'sqlalchemy.')
    # Avoid an unbound variable when the schema key is absent
    schema = settings.get('sosilee.schema')
    metadata = MetaData(engine, schema=schema, reflect=True)
    metadata.drop_all()
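A minimal usage sketch; the keys mirror the ones the function reads, and the URL and schema values are placeholders:

# `engine_from_config` picks up keys under the 'sqlalchemy.' prefix,
# while 'sosilee.schema' selects the schema to reflect and drop.
settings = {
    'sqlalchemy.url': 'postgresql://user:secret@localhost/mydb',
    'sosilee.schema': 'public',
}
drop_all_tables(settings)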
Example no. 8
def delete_schema(self, meta_data: MetaData) -> None:
    """Delete tables based on the metadata object."""
    meta_data.drop_all(bind=self.conn)