def test_backup_and_restore_sqlite():
    logging.basicConfig(level=logging.DEBUG)
    backup_dir = tempfile.mkdtemp()
    try:
        # Create a file-backed SQLite database and seed it with one row.
        dbfile = os.path.join(backup_dir, "test.db")
        dbsetup.setup(modules=[backup_test_db])
        dbsetup.init('sqlite:///' + dbfile)
        dbsetup.create()

        s = session()
        s.add(backup_test_db.TestTable(id="1", foo="bar"))
        transaction.commit()

        rows = s.query(backup_test_db.TestTable).filter(
            backup_test_db.TestTable.foo == "bar").all()
        assert rows

        # Dump the database and check the timestamped dump file was written.
        backup.dump_database(s, backup_dir)

        now = datetime.datetime.now()
        dump_file = os.path.join(
            backup_dir, "test.db.dump.{:%Y%m%d-%H%M}.gz".format(now))
        assert os.path.isfile(dump_file)

        # Wipe and recreate the schema; the seeded row should be gone.
        dbsetup.destroy()
        dbsetup.create()

        rows = s.query(backup_test_db.TestTable).all()
        assert not rows

        # Restore from the dump and check the row is back.
        backup.load_database(s, dbsetup.Base.metadata, dump_file)

        rows = s.query(backup_test_db.TestTable).filter(
            backup_test_db.TestTable.foo == "bar").all()
        assert rows

    finally:
        shutil.rmtree(backup_dir)
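
# A minimal sketch, not taken from the source, of what backup_test_db is
# assumed to provide: a declarative model on dbsetup.Base with string "id"
# and "foo" columns, inferred from TestTable(id="1", foo="bar") and the
# filter on TestTable.foo above. The real module and __tablename__ may differ.
from sqlalchemy import Column, String


class TestTable(dbsetup.Base):
    __tablename__ = "test_table"  # hypothetical name

    id = Column(String, primary_key=True)
    foo = Column(String)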
def main(argv=sys.argv):
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    dbsetup.setup(dbsetup.modules_from_config(settings, 'commondb.'))
    dbsetup.init_from_config(settings, 'sqlalchemy.')
    with transaction.manager:
        dbsetup.create()
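
# A minimal sketch of the usage() helper called above, assuming the
# conventional Pyramid "initialize_db" console-script pattern (and that os
# and sys are imported at module level, as the surrounding code suggests).
# The project's real usage() may print different text.
def usage(argv):
    cmd = os.path.basename(argv[0])
    print('usage: %s <config_uri>\n'
          '(example: "%s development.ini")' % (cmd, cmd))
    sys.exit(1)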
def setUp(self):
    """Set up a clean schema, ready to load test data into, for each test."""
    # This needs to be autogenerated or otherwise managed so it doesn't
    # interfere with other test runs that may occur simultaneously.
    #
    #dbsetup.init("sqlite:///testdata.db")
    #
    # Stick with in-memory for the moment:
    dbsetup.init("sqlite:///:memory:", use_transaction=False)
    dbsetup.create()

    # Used so I can manipulate objects returned from the API, binding them
    # to my session. Otherwise the internal session used is closed, which
    # would normally be fine.
    self.session = session()
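
# A hypothetical tearDown counterpart to the setUp above; it is not shown in
# the source. It closes the explicitly bound session and drops the in-memory
# schema so the next setUp starts from a fresh dbsetup.create(). The real
# test class may instead rely on the :memory: database being discarded.
def tearDown(self):
    self.session.close()
    dbsetup.destroy()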