def postconditions_30(self):
    session = get_session(make_db_uri(self.final_db_file))

    self.assertTrue(session.query(models.File).filter(models.File.id == u'logo').count() == 1)
    self.assertTrue(session.query(models.File).filter(models.File.id == u'css').count() == 1)

    session.close()
def test_transaction_with_exception(self):
    session = get_session()
    count1 = session.query(Tenant).count()

    yield self.assertFailure(self._transact_with_exception(), Exception)

    count2 = session.query(Tenant).count()

    self.assertEqual(count1, count2)
def test_transaction_with_exception(self):
    session = get_session()
    count1 = session.query(Counter).count()

    yield self.assertFailure(self._transact_with_exception(), Exception)

    count2 = session.query(Counter).count()

    self.assertEqual(count1, count2)
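Both exception tests above depend on a _transact_with_exception helper that is not shown here; a minimal sketch follows, assuming a transact-style decorator that runs the wrapped function inside a managed transaction and rolls it back when an exception escapes (the decorator name and the model used are illustrative assumptions):

# Hedged sketch: 'transact' and the Tenant model used here are assumptions, not taken from the snippets above.
@transact
def _transact_with_exception(self, session):
    session.add(Tenant())  # stage a new row inside the managed transaction
    raise Exception("forced failure")  # an escaping exception should trigger a rollback, leaving the row count unchanged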
def postconditions_36(self):
    session = get_session(make_db_uri(self.final_db_file))

    hs = session.query(config.Config).filter(config.Config.tid == 1, config.Config.var_name == u'onionservice').one().value
    pk = session.query(config.Config).filter(config.Config.tid == 1, config.Config.var_name == u'tor_onion_key').one().value

    self.assertEqual('lftx7dbyvlc5txtl.onion', hs)

    with open(os.path.join(helpers.DATA_DIR, 'tor/ephemeral_service_key')) as f:
        saved_key = f.read().strip()

    self.assertEqual(saved_key, pk)

    session.close()
def get_db_file(db_path):
    path = os.path.join(db_path, 'globaleaks.db')
    if os.path.exists(path):
        session = get_session(make_db_uri(path))
        version_db = session.query(models.Config.value).filter(Config.tid == 1,
                                                               Config.var_name == u'version_db').one()[0]
        session.close()
        return version_db, path

    for i in reversed(range(0, DATABASE_VERSION + 1)):
        file_name = 'glbackend-%d.db' % i
        db_file_path = os.path.join(db_path, 'db', file_name)
        if os.path.exists(db_file_path):
            return i, db_file_path

    return 0, ''
def perform_data_update(db_file): """ Update the database including up-to-date application data :param db_file: The database file path """ now = datetime_now() appdata = load_appdata() session = get_session(make_db_uri(db_file), foreign_keys=False) enabled_languages = [ lang.name for lang in session.query(models.EnabledLanguage) ] removed_languages = list( set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES)) if removed_languages: removed_languages.sort() removed_languages = ', '.join(removed_languages) raise Exception( "FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n" % removed_languages) try: if config.ConfigFactory(session, 1).get_val('version') != __version__: session.query(models.Config).filter_by(var_name = 'version') \ .update({'value': __version__, 'update_date': now}) session.query(models.Config).filter_by(var_name = 'latest_version') \ .update({'value': __version__, 'update_date': now}) session.query(models.Config).filter_by(var_name = 'version_db') \ .update({'value': DATABASE_VERSION, 'update_date': now}) for tid in [t[0] for t in session.query(models.Tenant.id)]: config.update_defaults(session, tid, appdata) db_load_defaults(session) session.commit() except: session.rollback() raise finally: session.close()
def perform_data_update(db_file): """ Update the database including up-to-date application data :param db_file: The database file path """ session = get_session(make_db_uri(db_file), foreign_keys=False) enabled_languages = [ lang.name for lang in session.query(models.EnabledLanguage) ] removed_languages = list( set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES)) if removed_languages: removed_languages.sort() removed_languages = ', '.join(removed_languages) raise Exception( "FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n" "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop" % removed_languages) try: cfg = config.ConfigFactory(session, 1) stored_ver = cfg.get_val('version') if stored_ver != __version__: # The below commands can change the current store based on the what is # currently stored in the DB. for tid in [t[0] for t in session.query(models.Tenant.id)]: appdata = load_appdata() config.update_defaults(session, tid, appdata) db_load_defaults(session) cfg.set_val('version', __version__) cfg.set_val('latest_version', __version__) cfg.set_val('version_db', DATABASE_VERSION) session.commit() except: session.rollback() raise finally: session.close()
def get_db_file(db_path): """ Utility function to retrieve the database file path :param db_path: The path where to look for the database file :return: The version and the path of the existing database file """ path = os.path.join(db_path, 'globaleaks.db') if os.path.exists(path): session = get_session(make_db_uri(path)) version_db = session.query(models.Config.value).filter( Config.tid == 1, Config.var_name == 'version_db').one()[0] session.close() return version_db, path for i in reversed(range(0, DATABASE_VERSION + 1)): file_name = 'glbackend-%d.db' % i db_file_path = os.path.join(db_path, 'db', file_name) if os.path.exists(db_file_path): return i, db_file_path return 0, ''
def perform_data_update(db_file):
    session = get_session(make_db_uri(db_file), foreign_keys=False)

    enabled_languages = [lang.name for lang in session.query(models.EnabledLanguage)]

    removed_languages = list(set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES))

    if removed_languages:
        removed_languages.sort()
        removed_languages = ', '.join(removed_languages)
        raise Exception("FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
                        "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop" % removed_languages)

    try:
        cfg = ConfigFactory(session, 1)
        stored_ver = cfg.get_val(u'version')

        if stored_ver != __version__:
            # The commands below can change the current store based on what is
            # currently stored in the DB.
            for tid in [t[0] for t in session.query(models.Tenant.id)]:
                appdata = load_appdata()
                config.update_defaults(session, tid, appdata)

            db_update_defaults(session)

            db_fix(session)

            cfg.set_val(u'version', __version__)
            cfg.set_val(u'latest_version', __version__)
            cfg.set_val(u'version_db', DATABASE_VERSION)

        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
def perform_migration(version):
    """
    Perform the sequential migration of the database up to DATABASE_VERSION

    @param version: the version of the database to migrate from
    """
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        log.info("Migrations from DB version lower than %d are no longer supported!" % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    tmpdir = os.path.abspath(os.path.join(Settings.tmp_path, 'tmp'))

    if version < 41:
        orig_db_file = os.path.abspath(os.path.join(Settings.working_path, 'db', 'glbackend-%d.db' % version))
    else:
        orig_db_file = os.path.abspath(os.path.join(Settings.working_path, 'globaleaks.db'))

    final_db_file = os.path.abspath(os.path.join(Settings.working_path, 'globaleaks.db'))

    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy(orig_db_file, os.path.join(tmpdir, 'old.db'))

    new_db_file = None

    try:
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(os.path.join(tmpdir, 'old.db'))
            new_db_file = os.path.abspath(os.path.join(tmpdir, 'new.db'))

            if os.path.exists(new_db_file):
                shutil.move(new_db_file, old_db_file)

            Settings.db_file = new_db_file
            Settings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            log.info("Updating DB from version %d to version %d" % (version, version + 1))

            j = version - FIRST_DATABASE_VERSION_SUPPORTED
            session_old = get_session(make_db_uri(old_db_file))

            engine = get_engine(make_db_uri(new_db_file), foreign_keys=False)

            if FIRST_DATABASE_VERSION_SUPPORTED + j + 1 == DATABASE_VERSION:
                Base.metadata.create_all(engine)
            else:
                Bases[j + 1].metadata.create_all(engine)

            session_new = sessionmaker(bind=engine)()

            # Instantiate the migration script for the target version
            MigrationModule = importlib.import_module("globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(migration_mapping, version, session_old, session_new)

            log.info("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    log.err("Failure while executing migration prologue: %s" % exception)
                    raise exception

                for model_name, _ in migration_mapping.items():
                    if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            log.err("Failure while migrating table %s: %s " % (model_name, exception))
                            raise exception

                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    log.err("Failure while executing migration epilogue: %s " % exception)
                    raise exception
            finally:
                # the database should always be closed before leaving the application
                # in order to not keep leaking journal files
                migration_script.close()

            log.info("Migration stats:")

            # we open a new db in order to verify integrity of the generated file
            session_verify = get_session(make_db_uri(new_db_file))

            for model_name, _ in migration_mapping.items():
                if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                    count = session_verify.query(migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" %
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            log.info(" * %s table migrated (entries count changed from %d to %d)" %
                                     (model_name, migration_script.entries_count[model_name], count))
                    else:
                        log.info(" * %s table migrated (%d entry(s))" %
                                 (model_name, migration_script.entries_count[model_name]))

            version += 1

            session_verify.close()

        perform_data_update(new_db_file)

        # in case of success first copy the new migrated db, then as last action delete the original db file
        shutil.copy(new_db_file, final_db_file)

        if orig_db_file != final_db_file:
            overwrite_and_remove(orig_db_file)

        path = os.path.join(Settings.working_path, 'db')
        if os.path.exists(path):
            shutil.rmtree(path)
    finally:
        # Always cleanup the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            overwrite_and_remove(os.path.join(tmpdir, f))

        shutil.rmtree(tmpdir)
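The helpers above are typically combined by an update entry point that reads the on-disk version and then decides between running the full migration and merely refreshing application data; the sketch below is a hedged assumption about how such a caller could look (the function name update_db, its return convention, and the log message are illustrative):

# Hedged sketch of a caller; 'update_db' and its return value are assumptions, not taken from the snippets.
def update_db():
    db_version, db_file_path = get_db_file(Settings.working_path)

    if db_file_path == '':
        return 0  # no database on disk: nothing to migrate or update

    if db_version < DATABASE_VERSION:
        log.info("Found an outdated db (version %s) at %s; migrating it to version %d" %
                 (db_version, db_file_path, DATABASE_VERSION))
        perform_migration(db_version)
    else:
        perform_data_update(db_file_path)

    return db_version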
def test_transact_with_stuff(self):
    yield self._transact_with_success()

    # now check data actually written
    session = get_session()
    self.assertEqual(session.query(Tenant).count(), 2)
def perform_migration(version):
    """
    Perform the sequential migration of the database up to DATABASE_VERSION

    @param version: the version of the database to migrate from
    """
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        log.info("Migrations from DB version lower than %d are no longer supported!" % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    tmpdir = os.path.abspath(os.path.join(Settings.tmp_path, 'tmp'))

    if version < 41:
        orig_db_file = os.path.abspath(os.path.join(Settings.working_path, 'db', 'glbackend-%d.db' % version))
    else:
        orig_db_file = os.path.abspath(os.path.join(Settings.working_path, 'globaleaks.db'))

    final_db_file = os.path.abspath(os.path.join(Settings.working_path, 'globaleaks.db'))

    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy(orig_db_file, os.path.join(tmpdir, 'old.db'))

    new_db_file = None

    try:
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(os.path.join(tmpdir, 'old.db'))
            new_db_file = os.path.abspath(os.path.join(tmpdir, 'new.db'))

            if os.path.exists(new_db_file):
                shutil.move(new_db_file, old_db_file)

            Settings.db_file = new_db_file
            Settings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            log.info("Updating DB from version %d to version %d" % (version, version + 1))

            j = version - FIRST_DATABASE_VERSION_SUPPORTED
            session_old = get_session(make_db_uri(old_db_file))

            engine = get_engine(make_db_uri(new_db_file), foreign_keys=False)

            if FIRST_DATABASE_VERSION_SUPPORTED + j + 1 == DATABASE_VERSION:
                Base.metadata.create_all(engine)
            else:
                Bases[j + 1].metadata.create_all(engine)

            session_new = sessionmaker(bind=engine)()

            # Instantiate the migration script for the target version
            MigrationModule = importlib.import_module("globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(migration_mapping, version, session_old, session_new)

            log.info("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    log.err("Failure while executing migration prologue: %s" % exception)
                    raise exception

                for model_name, _ in migration_mapping.items():
                    if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            log.err("Failure while migrating table %s: %s " % (model_name, exception))
                            raise exception

                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    log.err("Failure while executing migration epilogue: %s " % exception)
                    raise exception
            finally:
                # the database should always be closed before leaving the application
                # in order to not keep leaking journal files
                migration_script.close()

            log.info("Migration stats:")

            # we open a new db in order to verify integrity of the generated file
            session_verify = get_session(make_db_uri(new_db_file))

            for model_name, _ in migration_mapping.items():
                if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                    count = session_verify.query(migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" %
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            log.info(" * %s table migrated (entries count changed from %d to %d)" %
                                     (model_name, migration_script.entries_count[model_name], count))
                    else:
                        log.info(" * %s table migrated (%d entry(s))" %
                                 (model_name, migration_script.entries_count[model_name]))

            version += 1

            session_verify.close()

        perform_data_update(new_db_file)

        # in case of success first copy the new migrated db, then as last action delete the original db file
        shutil.copy(new_db_file, final_db_file)

        if orig_db_file != final_db_file:
            overwrite_and_remove(orig_db_file)

        path = os.path.join(Settings.working_path, 'db')
        if os.path.exists(path):
            shutil.rmtree(path)
    except Exception as e:
        print(e)
    finally:
        # Always cleanup the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            overwrite_and_remove(os.path.join(tmpdir, f))

        shutil.rmtree(tmpdir)
def test_transact_with_stuff(self):
    yield self._transact_with_success()

    # now check data actually written
    session = get_session()
    self.assertEqual(session.query(Counter).count(), 1)