def drop_all(session):
    """Drop every table reachable through *session*, removing named FK constraints first."""
    insp = reflection.Inspector.from_engine(session.bind)
    meta = MetaData()
    tables = []
    constraints = []
    # Collect everything up front: some databases lock once objects start
    # being dropped inside a transaction.
    for name in insp.get_table_names():
        fk_list = [
            ForeignKeyConstraint((), (), name=fk['name'])
            for fk in insp.get_foreign_keys(name)
            if fk['name']
        ]
        tables.append(Table(name, meta, *fk_list))
        constraints.extend(fk_list)
    # Drop FK constraints first so the tables can be dropped in any order.
    for constraint in constraints:
        session.execute(DropConstraint(constraint))
    for table in tables:
        session.execute(DropTable(table))
    session.commit()
def db_drop_everything(db):
    """Drop all tables and their named FK constraints from *db*.

    Recipe adapted from https://www.mbeckler.org/blog/?p=218 and
    http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    """
    connection = db.engine.connect()
    transaction = connection.begin()
    inspector = reflection.Inspector.from_engine(db.engine)
    meta = MetaData()
    dropped_tables = []
    dropped_fks = []
    # Snapshot the schema before issuing any DDL.
    for name in inspector.get_table_names():
        named_fks = [
            ForeignKeyConstraint((), (), name=fk['name'])
            for fk in inspector.get_foreign_keys(name)
            if fk['name']
        ]
        dropped_tables.append(Table(name, meta, *named_fks))
        dropped_fks.extend(named_fks)
    # Constraints first, then tables, so drop order is irrelevant.
    for fk in dropped_fks:
        connection.execute(DropConstraint(fk))
    for tbl in dropped_tables:
        connection.execute(DropTable(tbl))
    transaction.commit()
def delete_all_tables(db):
    """Drop all tables in the database.

    Collects every table and its named foreign-key constraints first, drops
    the constraints, then the tables, and finally removes every non-public
    schema.  Before a DropSchema, each function in that schema is dropped via
    a generated ``DROP FUNCTION`` statement — this part is Postgres-specific
    (it queries ``pg_proc`` / ``pg_namespace``).
    """
    conn = db.engine.connect()
    transaction = conn.begin()
    inspector = inspect(db.engine)
    metadata = MetaData()

    # get_all_tables presumably maps schema name -> list of table names;
    # it is defined elsewhere in this project — TODO confirm.
    all_schema_tables = get_all_tables(db)
    tables = []
    all_fkeys = []
    for schema, schema_tables in all_schema_tables.items():
        for table_name in schema_tables:
            # Only named FK constraints can be dropped explicitly.
            fkeys = [ForeignKeyConstraint((), (), name=fk['name'])
                     for fk in inspector.get_foreign_keys(table_name, schema=schema)
                     if fk['name']]
            tables.append(Table(table_name, metadata, *fkeys, schema=schema))
            all_fkeys.extend(fkeys)

    # Drop all FK constraints first so tables can be dropped in any order.
    for fkey in all_fkeys:
        conn.execute(DropConstraint(fkey))
    for table in tables:
        conn.execute(DropTable(table))
    for schema in all_schema_tables:
        if schema != 'public':
            # Build one DROP FUNCTION statement per function in the schema.
            # NOTE(review): the schema name is interpolated with str.format —
            # acceptable only because it comes from DB introspection, not
            # from user input; verify callers cannot inject schema names.
            row = conn.execute(''' SELECT 'DROP FUNCTION ' || ns.nspname || '.' || proname || '(' || oidvectortypes(proargtypes) || ')' FROM pg_proc INNER JOIN pg_namespace ns ON (pg_proc.pronamespace = ns.oid) WHERE ns.nspname = '{}' order by proname; '''.format(schema))
            for stmt, in row:
                conn.execute(stmt)
            conn.execute(DropSchema(schema))
    transaction.commit()
def delete_all_tables(db):
    """Drop every table in every schema of *db*, then drop non-public schemas.

    Named foreign-key constraints are collected and dropped first so the
    tables themselves can be dropped in any order.

    :param db: object exposing a SQLAlchemy ``engine`` attribute
    """
    conn = db.engine.connect()
    transaction = conn.begin()
    inspector = Inspector.from_engine(db.engine)
    metadata = MetaData()

    # get_all_tables (defined elsewhere in this module) presumably maps
    # schema name -> iterable of table names — TODO confirm.
    all_schema_tables = get_all_tables(db)
    tables = []
    all_fkeys = []
    # BUG FIX: dict.iteritems() is Python 2 only and raises AttributeError
    # on Python 3; items() behaves the same on both.
    for schema, schema_tables in all_schema_tables.items():
        for table_name in schema_tables:
            # Only named constraints can be targeted by DROP CONSTRAINT.
            fkeys = [ForeignKeyConstraint((), (), name=fk['name'])
                     for fk in inspector.get_foreign_keys(table_name, schema=schema)
                     if fk['name']]
            tables.append(Table(table_name, metadata, *fkeys, schema=schema))
            all_fkeys.extend(fkeys)

    # Constraints first, then tables, so drop order is irrelevant.
    for fkey in all_fkeys:
        conn.execute(DropConstraint(fkey))
    for table in tables:
        conn.execute(DropTable(table))
    # Remove any auxiliary schemas; 'public' must remain.
    for schema in all_schema_tables:
        if schema != 'public':
            conn.execute(DropSchema(schema))
    transaction.commit()
def drop_everything(self):
    """Drop every table on ``self.engine``, removing named FK constraints first."""
    connection = self.engine.connect()
    transaction = connection.begin()
    reflector = reflection.Inspector.from_engine(self.engine)
    meta = MetaData()
    pending_tables = []
    pending_constraints = []
    # Snapshot the schema before dropping anything; some backends lock
    # once DDL drops begin inside a transaction.
    for tbl_name in reflector.get_table_names():
        named = [
            ForeignKeyConstraint((), (), name=info['name'])
            for info in reflector.get_foreign_keys(tbl_name)
            if info['name']
        ]
        pending_tables.append(Table(tbl_name, meta, *named))
        pending_constraints.extend(named)
    for cstr in pending_constraints:
        connection.execute(DropConstraint(cstr))
    for tbl in pending_tables:
        connection.execute(DropTable(tbl))
    transaction.commit()
def db(app):  # pylint: disable=redefined-outer-name, invalid-name
    """Return a session-wide initialised database.

    Drops all existing tables - Meta follows Postgres FKs
    """
    with app.app_context():
        # Clear out any existing tables: reflect the live schema, drop each
        # FK constraint first (so drop order does not matter), then drop the
        # reflected tables and anything the model metadata still knows about.
        metadata = MetaData(_db.engine)
        metadata.reflect()
        for table in metadata.tables.values():
            for fk in table.foreign_keys:  # pylint: disable=invalid-name
                _db.engine.execute(DropConstraint(fk.constraint))
        metadata.drop_all()
        _db.drop_all()

        # Postgres sequences are not removed by drop_all; list and drop them
        # explicitly so the migrations below can recreate them from scratch.
        sequence_sql = """SELECT sequence_name FROM information_schema.sequences WHERE sequence_schema='public' """

        sess = _db.session()
        for seq in [name for (name, ) in sess.execute(text(sequence_sql))]:
            try:
                # NOTE(review): sequence name is %-interpolated into SQL —
                # safe only because it comes from information_schema, not
                # from user input.
                sess.execute(text('DROP SEQUENCE public.%s ;' % seq))
                print('DROP SEQUENCE public.%s ' % seq)
            except Exception as err:  # pylint: disable=broad-except # noqa B902
                # Best-effort: log and keep dropping the remaining sequences.
                print(f'Error: {err}')
        sess.commit()

        # ############################################
        # There are 2 approaches, an empty database, or the same one that the app will use
        # create the tables
        # _db.create_all()
        # or
        # Use Alembic to load all of the DB revisions including supporting lookup data
        # This is the path we'll use in legal_api!!
        # Even though Migrate isn't referenced directly afterwards, it sets up
        # the internal configs that upgrade() needs.
        namex_api_dir = os.path.abspath('..').replace('services', 'api')
        namex_api_dir = os.path.join(namex_api_dir, 'migrations')
        Migrate(app, _db, namex_api_dir)
        upgrade()

        return _db
def db_DropEverything(self):
    """Drop all named FK constraints and all tables except the protected dataset tables."""
    # BUG FIX: `print "Sroping everything"` was Python 2 print-statement
    # syntax — a SyntaxError on Python 3 — and misspelled "Dropping".
    print("Dropping everything")
    conn = self.engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(self.engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            # Unnamed constraints cannot be dropped explicitly.
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc, cascade=True))

    for table in tbs:
        # tab_datasets / tab_resourcesinfo are module-level names holding the
        # protected table names (defined elsewhere in this file) — those two
        # tables are deliberately kept.
        if table.name != tab_datasets and table.name != tab_resourcesinfo:
            conn.execute(DropTable(table))

    trans.commit()
def db_drop_everything(db):
    """Remove every table (and its named FK constraints) from *db*.

    From http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    """
    connection = db.engine.connect()
    # The transaction only applies if the DB supports transactional DDL,
    # i.e. Postgresql, MS SQL Server.
    ddl_transaction = connection.begin()
    inspector = reflection.Inspector.from_engine(db.engine)
    # Gather all data first before dropping anything: some DBs lock after
    # things have been dropped in a transaction.
    metadata = MetaData()
    table_objects = []
    constraint_objects = []
    for name in inspector.get_table_names():
        current_fks = []
        for fk_info in inspector.get_foreign_keys(name):
            if fk_info['name']:
                current_fks.append(ForeignKeyConstraint((), (), name=fk_info['name']))
        table_objects.append(Table(name, metadata, *current_fks))
        constraint_objects.extend(current_fks)
    for constraint in constraint_objects:
        connection.execute(DropConstraint(constraint))
    for table_obj in table_objects:
        connection.execute(DropTable(table_obj))
    ddl_transaction.commit()
def create_book(sqlite_file=None, uri_conn=None, currency="EUR", overwrite=False, keep_foreign_keys=False,
                db_type=None, db_user=None, db_password=None, db_name=None, db_host=None, db_port=None,
                version_format="2.6",
                **kwargs):
    """Create a new empty GnuCash book.

    If both sqlite_file and uri_conn are None, then an "in memory" sqlite book is created.

    :param str sqlite_file: a path to an sqlite3 file (only used if uri_conn is None)
    :param str uri_conn: a sqlalchemy connection string
    :param str currency: the ISO symbol of the default currency of the book
    :param bool overwrite: True if book should be deleted and recreated if it exists already
    :param bool keep_foreign_keys: True if the foreign keys should be kept (may not work at all with GnuCash)
    :param str db_type: type of database in ["postgres","mysql"]
    :param str db_user: username of database
    :param str db_password: password for the use of database
    :param str db_name: name of database
    :param str db_host: host of database
    :param str db_port: port of database
    :param str version_format: the format (2.6 or 2.7) for the schema tables to generate
    :return: the document as a gnucash session
    :rtype: :class:`GncSession`
    :raises GnucashException: if document already exists and overwrite is False
    """
    from sqlalchemy_utils.functions import database_exists, create_database, drop_database

    uri_conn = build_uri(sqlite_file, uri_conn, db_type, db_user, db_password, db_name, db_host, db_port)

    # create database (if DB is not a sqlite in memory)
    if uri_conn != "sqlite:///:memory:":
        if database_exists(uri_conn):
            if overwrite:
                drop_database(uri_conn)
            else:
                raise GnucashException("'{}' db already exists".format(uri_conn))
        create_database(uri_conn)

    engine = create_piecash_engine(uri_conn, **kwargs)

    # drop constraints if we do not want to keep them (keep_foreign_keys=False), the default
    if not keep_foreign_keys:
        for n, tbl in DeclarativeBase.metadata.tables.items():
            # drop index constraints (registered as one-shot DDL events)
            for idx in tbl.indexes:
                event.listen(tbl, "after_create", DropIndex(idx), once=True)
            # drop FK constraints; primary keys must stay
            for cstr in tbl.constraints:
                if isinstance(cstr, PrimaryKeyConstraint):
                    continue
                else:
                    event.listen(tbl, "before_drop", DropConstraint(cstr), once=True)

    # create all (tables, fk, ...)
    DeclarativeBase.metadata.create_all(engine)

    s = Session(bind=engine)

    # create all rows in version table
    # NOTE(review): .format() binds to the second string literal only (method
    # call binds tighter than implicit concatenation), so the first {} in the
    # assert message is never filled — the failure message comes out wrong.
    # Harmless on the success path, but worth fixing.
    assert version_format in version_supported, "The 'version_format'={} is not supported. " \
        "Choose one of {}".format(version_format, list(version_supported.keys()))
    for table_name, table_version in version_supported[version_format].items():
        s.add(Version(table_name=table_name, table_version=table_version))

    # create book and merge with session
    b = Book()
    s.add(b)
    adapt_session(s, book=b, readonly=False)

    # create commodities and initial accounts
    from .account import Account

    b.root_account = Account(name="Root Account", type="ROOT", commodity=None, book=b)
    b.root_template = Account(name="Template Root", type="ROOT", commodity=None, book=b)
    b["default-currency"] = b.currencies(mnemonic=currency)
    b.save()
    return b
async def visit_foreign_key_constraint(self, constraint):
    """Emit a DROP CONSTRAINT for *constraint*, if the dialect supports ALTER."""
    if self.dialect.supports_alter:
        await self.connection.status(DropConstraint(constraint))
def create_book(sqlite_file=None, uri_conn=None, currency="EUR", overwrite=False, keep_foreign_keys=False,
                db_type=None, db_user=None, db_password=None, db_name=None, db_host=None, db_port=None,
                version_format="2.6",
                **kwargs):
    """Create a new empty GnuCash book.

    If both sqlite_file and uri_conn are None, then an "in memory" sqlite book is created.

    :param str sqlite_file: a path to an sqlite3 file (only used if uri_conn is None)
    :param str uri_conn: a sqlalchemy connection string
    :param str currency: the ISO symbol of the default currency of the book
    :param bool overwrite: True if book should be deleted and recreated if it exists already
    :param bool keep_foreign_keys: True if the foreign keys should be kept (may not work at all with GnuCash)
    :param str db_type: type of database in ["postgres","mysql"]
    :param str db_user: username of database
    :param str db_password: password for the use of database
    :param str db_name: name of database
    :param str db_host: host of database
    :param str db_port: port of database
    :param str version_format: the format (2.6 or 2.7) for the schema tables to generate
    :return: the document as a gnucash session
    :rtype: :class:`GncSession`
    :raises GnucashException: if document already exists and overwrite is False
    """
    from sqlalchemy_utils.functions import database_exists, create_database, drop_database

    uri_conn = build_uri(sqlite_file, uri_conn, db_type, db_user, db_password, db_name, db_host, db_port)

    _db_created = False

    # create database (if DB is not a sqlite in memory)
    if uri_conn != "sqlite:///:memory:":
        if database_exists(uri_conn):
            if overwrite:
                drop_database(uri_conn)
            else:
                raise GnucashException("'{}' db already exists".format(uri_conn))
        create_database(uri_conn)
        _db_created = True

    engine = create_piecash_engine(uri_conn, **kwargs)

    # Do any special setup we need to do the first time the database is created
    if _db_created:
        # For postgresql, GnuCash needs the standard_conforming_strings database variable set to 'on' in order to
        # find the gnclock table as expected. (Probably would break some other stuff too.)
        # NOTE(review): this regex only matches 'postgres://' URIs; SQLAlchemy
        # URIs are often 'postgresql://' — confirm build_uri's output scheme.
        match = re.match('postgres://([^:]+):([^@]+)@([^/]+)/(.+)', uri_conn)
        if match:
            # TODO: figure out how to use sqlalchemy.sql.expression.literal to make this slightly SQL injection safer.
            # t = text('ALTER DATABASE :db_name SET standard_conforming_string TO on')
            # engine.execute(t, db_name="blah")
            # produces: ALTER DATABASE 'blah' SET standard_conforming_string TO on
            # we need: ALTER DATABASE blah SET standard_conforming_string TO on
            t = text('ALTER DATABASE {} SET standard_conforming_strings TO on'.format(match.group(4)))
            engine.execute(t)

    # drop constraints if we do not want to keep them (keep_foreign_keys=False), the default
    if not keep_foreign_keys:
        for n, tbl in DeclarativeBase.metadata.tables.items():
            # drop index constraints (registered as one-shot DDL events)
            for idx in tbl.indexes:
                event.listen(tbl, "after_create", DropIndex(idx), once=True)
            # drop FK constraints; primary keys must stay
            for cstr in tbl.constraints:
                if isinstance(cstr, PrimaryKeyConstraint):
                    continue
                else:
                    event.listen(tbl, "before_drop", DropConstraint(cstr), once=True)

    # create all (tables, fk, ...)
    DeclarativeBase.metadata.create_all(engine)

    s = Session(bind=engine)

    # create all rows in version table
    # NOTE(review): .format() binds to the second string literal only, so the
    # first {} in this assert message is never filled — message-only bug.
    assert version_format in version_supported, "The 'version_format'={} is not supported. " \
        "Choose one of {}".format(version_format, list(version_supported.keys()))
    for table_name, table_version in version_supported[version_format].items():
        s.add(Version(table_name=table_name, table_version=table_version))

    # create book and merge with session
    b = Book()
    s.add(b)
    adapt_session(s, book=b, readonly=False)

    # create commodities and initial accounts
    from .account import Account

    b.root_account = Account(name="Root Account", type="ROOT", commodity=None, book=b)
    b.root_template = Account(name="Template Root", type="ROOT", commodity=None, book=b)
    b["default-currency"] = b.currencies(mnemonic=currency)
    b.save()
    # take the GnuCash-style lock on the freshly created book
    s.create_lock()
    b._acquire_lock = True
    return b
def create_book(
    sqlite_file=None,
    uri_conn=None,
    currency="EUR",
    overwrite=False,
    keep_foreign_keys=False,
    db_type=None,
    db_user=None,
    db_password=None,
    db_name=None,
    db_host=None,
    db_port=None,
    check_same_thread=True,
    pg_template="template0",
    **kwargs
):
    """Create a new empty GnuCash book.

    If both sqlite_file and uri_conn are None, then an "in memory" sqlite book is created.

    :param str sqlite_file: a path to an sqlite3 file (only used if uri_conn is None)
    :param str uri_conn: a sqlalchemy connection string
    :param str currency: the ISO symbol of the default currency of the book
    :param bool overwrite: True if book should be deleted and recreated if it exists already
    :param bool keep_foreign_keys: True if the foreign keys should be kept (may not work at all with GnuCash)
    :param str db_type: type of database in ["postgres","mysql"]
    :param str db_user: username of database
    :param str db_password: password for the use of database
    :param str db_name: name of database
    :param str db_host: host of database
    :param int db_port: port of database
    :param bool check_same_thread: sqlite flag that restricts connection use to the thread that created
        (see False for use in ipython/flask/... but read first
        https://docs.python.org/3/library/sqlite3.html)
    :param str pg_template: the postgres template to use when creating the database. One of template1
        or template0 (default template0). Irrelevant for other databases than postgres.
    :return: the document as a gnucash session
    :rtype: :class:`GncSession`
    :raises GnucashException: if document already exists and overwrite is False
    """
    from sqlalchemy_utils.functions import (
        database_exists,
        create_database,
        drop_database,
    )

    VERSION_FORMAT = "3.0"

    uri_conn = build_uri(
        sqlite_file,
        uri_conn,
        db_type,
        db_user,
        db_password,
        db_name,
        db_host,
        db_port,
        check_same_thread,
    )

    # create database (if DB is not a sqlite in memory)
    if uri_conn != "sqlite:///:memory:":
        if database_exists(uri_conn):
            if overwrite:
                drop_database(uri_conn)
            else:
                raise GnucashException("'{}' db already exists".format(uri_conn))
        create_database(uri_conn, template=pg_template)

    engine = create_piecash_engine(uri_conn, **kwargs)

    # drop constraints if we do not want to keep them (keep_foreign_keys=False), the default
    if not keep_foreign_keys:
        for n, tbl in DeclarativeBase.metadata.tables.items():
            # drop index constraints (only the auto-generated ones)
            for idx in tbl.indexes:
                if idx.name.startswith("ix_") or idx.name.startswith("_"):
                    event.listen(tbl, "after_create", DropIndex(idx), once=True)
            # drop FK constraints; primary keys must stay
            for cstr in tbl.constraints:
                if isinstance(cstr, PrimaryKeyConstraint):
                    continue
                else:
                    event.listen(tbl, "before_drop", DropConstraint(cstr), once=True)

    # create all (tables, fk, ...)
    DeclarativeBase.metadata.create_all(engine)

    s = Session(bind=engine)

    # create all rows in version table
    # BUG FIX: previously ".format(...)" applied to the second string literal
    # only (a method call binds tighter than implicit string concatenation),
    # so the first {} placeholder was never filled and the failure message
    # came out mangled. Parenthesising the concatenation before .format fixes
    # the message without changing the success path.
    assert VERSION_FORMAT in version_supported, (
        "The 'version_format'={} is not supported. "
        "Choose one of {}"
    ).format(VERSION_FORMAT, list(version_supported.keys()))
    for table_name, table_version in version_supported[VERSION_FORMAT].items():
        s.add(Version(table_name=table_name, table_version=table_version))

    # create book and merge with session
    b = Book()
    s.add(b)
    adapt_session(s, book=b, readonly=False)

    # create commodities and initial accounts
    from .account import Account

    b.root_account = Account(
        name="Root Account",
        type="ROOT",
        commodity=factories.create_currency_from_ISO(currency),
        book=b,
    )
    b.root_template = Account(name="Template Root", type="ROOT", commodity=None, book=b)
    b.save()
    return b
def create_book(sqlite_file=None, uri_conn=None, currency="EUR", overwrite=False, keep_foreign_keys=False, **kwargs):
    """Create a new empty GnuCash book.

    If both sqlite_file and uri_conn are None, then an "in memory" sqlite book is created.

    :param str sqlite_file: a path to an sqlite3 file
    :param str uri_conn: a sqlalchemy connection string
    :param str currency: the ISO symbol of the default currency of the book
    :param bool overwrite: True if book should be deleted and recreated if it exists already
    :param bool keep_foreign_keys: True if the foreign keys should be kept (may not work at all with GnuCash)
    :return: the document as a gnucash session
    :rtype: :class:`GncSession`
    :raises GnucashException: if document already exists and overwrite is False
    """
    from sqlalchemy_utils.functions import database_exists, create_database, drop_database

    # derive the connection URI from the sqlite path, defaulting to in-memory
    if uri_conn is None:
        if sqlite_file:
            uri_conn = "sqlite:///{}".format(sqlite_file)
        else:
            uri_conn = "sqlite:///:memory:"

    # create database (if DB is not a sqlite in memory)
    if uri_conn != "sqlite:///:memory:":
        if database_exists(uri_conn):
            if overwrite:
                drop_database(uri_conn)
            else:
                raise GnucashException("'{}' db already exists".format(uri_conn))
        create_database(uri_conn)

    engine = create_engine(uri_conn, **kwargs)

    # create all (tables, fk, ...)
    DeclarativeBase.metadata.create_all(engine)

    # remove all foreign keys (only named ones can be dropped)
    if not keep_foreign_keys:
        for fk in get_foreign_keys(DeclarativeBase.metadata, engine):
            if fk.name:
                engine.execute(DropConstraint(fk))

    # start session to create initial objects
    s = Session(bind=engine)

    # create all rows in version table
    for table_name, table_version in version_supported.items():
        s.add(Version(name=table_name, version=table_version))

    # create Book and initial accounts
    from .account import Account

    gnc_s = GncSession(s)

    # create the Book with the 2 root accounts
    b = Book(root_account=Account(name="Root Account", type="ROOT", commodity=None),
             root_template=Account(name="Template Root", type="ROOT", commodity=None),
             )
    s.add(b)
    s.commit()

    # create the currency
    gnc_s.book.create_currency_from_ISO(currency)
    s.commit()
    return gnc_s