def eval_paths(self):
    """Derive every filesystem path the daemon uses from the configured roots."""
    def _abs(*parts):
        # Anchor a path relative to one of the configured roots as absolute.
        return os.path.abspath(os.path.join(*parts))

    self.pidfile_path = os.path.join(self.pid_path, 'globaleaks.pid')

    self.files_path = _abs(self.working_path, 'files')
    self.log_path = _abs(self.working_path, 'log')
    self.attachments_path = _abs(self.working_path, 'attachments')
    self.tmp_path = _abs(self.working_path, 'tmp')
    self.backup_path = _abs(self.working_path, 'backups')

    self.static_db_source = _abs(self.src_path, 'globaleaks', 'db')
    self.db_schema = os.path.join(self.static_db_source, 'sqlite.sql')
    self.db_file_path = _abs(self.working_path, 'globaleaks.db')

    self.logfile = _abs(self.log_path, 'globaleaks.log')
    self.accesslogfile = _abs(self.log_path, "access.log")

    # If we see that there is a custom build of GLClient, use that one.
    custom_client_path = '/var/globaleaks/client'
    if os.path.exists(custom_client_path):
        self.client_path = custom_client_path

    self.appdata_file = os.path.join(self.client_path, 'data/appdata.json')
    self.questionnaires_path = os.path.join(self.client_path, 'data/questionnaires')
    self.questions_path = os.path.join(self.client_path, 'data/questions')
    self.field_attrs_file = os.path.join(self.client_path, 'data/field_attrs.json')

    set_db_uri(make_db_uri(self.db_file_path))
def postconditions_36(self):
    """Verify the onion service address and key survived the v36 migration."""
    session = get_session(make_db_uri(self.final_db_file))

    def cfg_value(name):
        # Fetch the value of a tenant-1 config variable from the migrated DB.
        row = session.query(config.Config) \
                     .filter(config.Config.tid == 1,
                             config.Config.var_name == name).one()
        return row.value

    hs = cfg_value(u'onionservice')
    pk = cfg_value(u'tor_onion_key')

    self.assertEqual('lftx7dbyvlc5txtl.onion', hs)

    with open(os.path.join(helpers.DATA_DIR, 'tor/ephemeral_service_key')) as f:
        saved_key = f.read().strip()

    self.assertEqual(saved_key, pk)

    session.close()
def get_db_file(db_path):
    """Return (db_version, path) for the database file found under db_path.

    Falls back to (0, '') when no database exists.
    """
    # Current naming scheme: a single 'globaleaks.db' that records its own
    # schema version in the Config table.
    path = os.path.join(db_path, 'globaleaks.db')
    if os.path.exists(path):
        session = get_session(make_db_uri(path))
        version_db = session.query(models.Config.value) \
                            .filter(Config.tid == 1,
                                    Config.var_name == u'version_db').one()[0]
        session.close()
        return version_db, path

    # Legacy naming scheme: versioned 'glbackend-N.db' files under db/;
    # pick the newest one by scanning versions from highest to lowest.
    for i in range(DATABASE_VERSION, -1, -1):
        candidate = os.path.join(db_path, 'db', 'glbackend-%d.db' % i)
        if os.path.exists(candidate):
            return i, candidate

    return 0, ''
def perform_data_update(db_file):
    """
    Update the database including up-to-date application data

    :param db_file: The database file path
    """
    session = get_session(make_db_uri(db_file), foreign_keys=False)

    # Refuse to upgrade if any enabled language is no longer supported.
    enabled = {lang.name for lang in session.query(models.EnabledLanguage)}
    dropped = sorted(enabled - set(LANGUAGES_SUPPORTED_CODES))
    if dropped:
        raise Exception(
            "FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
            "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop" % ', '.join(dropped))

    try:
        cfg = config.ConfigFactory(session, 1)

        if cfg.get_val('version') != __version__:
            # The below commands can change the current store based on the what is
            # currently stored in the DB.
            tids = [t[0] for t in session.query(models.Tenant.id)]
            for tid in tids:
                config.update_defaults(session, tid, load_appdata())

            db_load_defaults(session)

            cfg.set_val('version', __version__)
            cfg.set_val('latest_version', __version__)
            cfg.set_val('version_db', DATABASE_VERSION)

        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
def perform_data_update(db_file):
    """
    Update the stored application data to match the current release.

    :param db_file: The database file path
    """
    # Consistency fix: use the shared get_session() factory like the other
    # data-update helpers in this file, instead of hand-rolling
    # get_engine() + sessionmaker() (which duplicated its internals).
    session = get_session(make_db_uri(db_file), foreign_keys=False)

    enabled_languages = [lang.name for lang in session.query(models.EnabledLanguage)]

    removed_languages = list(set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES))

    if removed_languages:
        removed_languages.sort()
        removed_languages = ', '.join(removed_languages)
        raise Exception(
            "FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
            "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop" % removed_languages)

    try:
        prv = ConfigFactory(session, 1, 'node')

        stored_ver = prv.get_val(u'version')

        if stored_ver != __version__:
            # The below commands can change the current store based on the what is
            # currently stored in the DB.
            for tid in [t[0] for t in session.query(models.Tenant.id)]:
                appdata = load_appdata()
                config.update_defaults(session, tid, appdata)

            db_update_defaults(session)
            db_fix_fields_attrs(session)

            prv.set_val(u'version', __version__)
            prv.set_val(u'latest_version', __version__)

        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
def eval_paths(self):
    """
    Derive every filesystem path used by the application from the
    configured roots, locate the client directory and set the DB URI.
    """
    self.pidfile_path = os.path.join(self.pid_path, 'globaleaks.pid')
    self.files_path = os.path.abspath(
        os.path.join(self.working_path, 'files'))
    self.log_path = os.path.abspath(os.path.join(self.working_path, 'log'))
    self.attachments_path = os.path.abspath(
        os.path.join(self.working_path, 'attachments'))
    self.tmp_path = os.path.abspath(os.path.join(self.working_path, 'tmp'))
    self.static_db_source = os.path.abspath(
        os.path.join(self.src_path, 'globaleaks', 'db'))
    self.db_schema = os.path.join(self.static_db_source, 'sqlite.sql')
    self.db_file_path = os.path.abspath(
        os.path.join(self.working_path, 'globaleaks.db'))
    self.logfile = os.path.abspath(
        os.path.join(self.log_path, 'globaleaks.log'))
    self.accesslogfile = os.path.abspath(
        os.path.join(self.log_path, "access.log"))

    # Client path detection: prefer a client bundled in the working path,
    # then fall back to the statically known candidate locations.
    # BUGFIX: build the candidate list locally instead of insert(0, ...)
    # on the module-level `possible_client_paths`; the in-place insert
    # permanently mutated the shared list on every call, so repeated
    # calls accumulated stale working-path entries that could shadow the
    # correct client directory.
    search_paths = [os.path.join(self.working_path, 'client')] + possible_client_paths

    for path in search_paths:
        if os.path.isfile(os.path.join(path, 'index.html')):
            self.client_path = path
            break

    if not self.client_path:
        print("Unable to find a directory to load the client from")
        sys.exit(1)

    self.appdata_file = os.path.join(self.client_path, 'data/appdata.json')
    self.questionnaires_path = os.path.join(self.client_path,
                                            'data/questionnaires')
    self.questions_path = os.path.join(self.client_path, 'data/questions')
    self.field_attrs_file = os.path.join(self.client_path,
                                         'data/field_attrs.json')

    set_db_uri(make_db_uri(self.db_file_path))
def get_db_file(db_path):
    """
    Utility function to retrieve the database file path

    :param db_path: The path where to look for the database file
    :return: The version and the path of the existing database file
    """
    # Preferred layout: a single 'globaleaks.db' carrying its own version.
    path = os.path.join(db_path, 'globaleaks.db')
    if os.path.exists(path):
        session = get_session(make_db_uri(path))
        version_db = session.query(models.Config.value).filter(
            Config.tid == 1, Config.var_name == 'version_db').one()[0]
        session.close()
        return version_db, path

    # Legacy layout: versioned glbackend-N.db files under db/; scan from
    # the highest known version down and return the first match.
    for i in range(DATABASE_VERSION, -1, -1):
        candidate = os.path.join(db_path, 'db', 'glbackend-%d.db' % i)
        if os.path.exists(candidate):
            return i, candidate

    return 0, ''
def eval_paths(self):
    """Compute every filesystem location used by the backend from its roots."""
    def working(*parts):
        # Absolute path for a location under the working directory.
        return os.path.abspath(os.path.join(self.working_path, *parts))

    self.config_file_path = '/etc/globaleaks'
    self.pidfile_path = os.path.join(self.pid_path, 'globaleaks.pid')

    self.files_path = working('files')
    self.db_path = working('db')
    self.log_path = working('log')
    self.attachments_path = working('attachments')
    self.tmp_path = working('tmp')
    self.backups_path = working('backups')

    self.static_db_source = os.path.abspath(
        os.path.join(self.src_path, 'globaleaks', 'db'))
    self.db_schema = os.path.join(self.static_db_source, 'sqlite.sql')

    # The database file name embeds the schema version it was created with.
    self.db_file_name = 'glbackend-%d.db' % DATABASE_VERSION
    self.db_file_path = os.path.abspath(
        os.path.join(self.db_path, self.db_file_name))

    self.logfile = os.path.abspath(
        os.path.join(self.log_path, 'globaleaks.log'))
    self.httplogfile = os.path.abspath(
        os.path.join(self.log_path, "http.log"))

    # If we see that there is a custom build of GLClient, use that one.
    custom_client_path = '/var/globaleaks/client'
    if os.path.exists(custom_client_path):
        self.client_path = custom_client_path

    self.appdata_file = os.path.join(self.client_path, 'data/appdata.json')
    self.questionnaires_path = os.path.join(self.client_path,
                                            'data/questionnaires')
    self.questions_path = os.path.join(self.client_path, 'data/questions')
    self.field_attrs_file = os.path.join(self.client_path,
                                         'data/field_attrs.json')

    set_db_uri(make_db_uri(self.db_file_path))
def perform_data_update(db_file):
    """Refresh stored application data and version markers for this release."""
    session = get_session(make_db_uri(db_file), foreign_keys=False)

    # Abort the upgrade if any enabled language lost support.
    enabled = {lang.name for lang in session.query(models.EnabledLanguage)}
    dropped = sorted(enabled - set(LANGUAGES_SUPPORTED_CODES))
    if dropped:
        raise Exception("FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
                        "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop" % ', '.join(dropped))

    try:
        cfg = ConfigFactory(session, 1)

        if cfg.get_val(u'version') != __version__:
            # The below commands can change the current store based on the what is
            # currently stored in the DB.
            tids = [t[0] for t in session.query(models.Tenant.id)]
            for tid in tids:
                config.update_defaults(session, tid, load_appdata())

            db_update_defaults(session)
            db_fix(session)

            cfg.set_val(u'version', __version__)
            cfg.set_val(u'latest_version', __version__)
            cfg.set_val(u'version_db', DATABASE_VERSION)

        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
def perform_migration(version):
    """
    Migrate the database from the given schema version up to DATABASE_VERSION,
    one version at a time, inside a temporary staging directory.

    @param version: the schema version of the existing database
    @return: None
    """
    # NOTE(review): these lists are appended to below but not consumed in
    # this function — presumably kept for external cleanup; confirm.
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        log.info("Migrations from DB version lower than %d are no longer supported!" % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    tmpdir = os.path.abspath(os.path.join(Settings.tmp_path, 'tmp'))

    # Databases older than version 41 used the db/glbackend-N.db layout;
    # newer ones are a single globaleaks.db in the working path.
    if version < 41:
        orig_db_file = os.path.abspath(os.path.join(Settings.working_path, 'db', 'glbackend-%d.db' % version))
    else:
        orig_db_file = os.path.abspath(os.path.join(Settings.working_path, 'globaleaks.db'))

    final_db_file = os.path.abspath(os.path.join(Settings.working_path, 'globaleaks.db'))

    # Recreate an empty staging directory and copy the original DB into it;
    # the original file is never migrated in place.
    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy(orig_db_file, os.path.join(tmpdir, 'old.db'))

    new_db_file = None

    try:
        while version < DATABASE_VERSION:
            # Ping-pong between old.db and new.db: each step reads old.db
            # and writes new.db, which becomes old.db for the next step.
            old_db_file = os.path.abspath(os.path.join(tmpdir, 'old.db'))
            new_db_file = os.path.abspath(os.path.join(tmpdir, 'new.db'))

            if os.path.exists(new_db_file):
                shutil.move(new_db_file, old_db_file)

            Settings.db_file = new_db_file
            Settings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            log.info("Updating DB from version %d to version %d" % (version, version + 1))

            j = version - FIRST_DATABASE_VERSION_SUPPORTED

            session_old = get_session(make_db_uri(old_db_file))

            engine = get_engine(make_db_uri(new_db_file), foreign_keys=False)

            # The final step targets the current model Base; intermediate
            # steps use the historical schema snapshot for that version.
            if FIRST_DATABASE_VERSION_SUPPORTED + j + 1 == DATABASE_VERSION:
                Base.metadata.create_all(engine)
            else:
                Bases[j+1].metadata.create_all(engine)

            session_new = sessionmaker(bind=engine)()

            # Here is instanced the migration script
            MigrationModule = importlib.import_module("globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(migration_mapping, version, session_old, session_new)

            log.info("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    log.err("Failure while executing migration prologue: %s" % exception)
                    raise exception

                for model_name, _ in migration_mapping.items():
                    # Only migrate tables that exist in both the source and
                    # target schema versions.
                    if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            log.err("Failure while migrating table %s: %s " % (model_name, exception))
                            raise exception

                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    log.err("Failure while executing migration epilogue: %s " % exception)
                    raise exception
            finally:
                # the database should be always closed before leaving the application
                # in order to not keep leaking journal files.
                migration_script.close()

            log.info("Migration stats:")

            # we open a new db in order to verify integrity of the generated file
            session_verify = get_session(make_db_uri(new_db_file))

            for model_name, _ in migration_mapping.items():
                if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                    count = session_verify.query(migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        # A count mismatch is fatal only for tables flagged
                        # fail_on_count_mismatch; otherwise it is just logged.
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" %
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            log.info(" * %s table migrated (entries count changed from %d to %d)" %
                                     (model_name, migration_script.entries_count[model_name], count))
                    else:
                        log.info(" * %s table migrated (%d entry(s))" %
                                 (model_name, migration_script.entries_count[model_name]))

            version += 1

            session_verify.close()

        perform_data_update(new_db_file)

        # in case of success first copy the new migrated db, then as last action delete the original db file
        shutil.copy(new_db_file, final_db_file)

        if orig_db_file != final_db_file:
            overwrite_and_remove(orig_db_file)

            # Remove the legacy db/ directory once migrated to the new layout.
            path = os.path.join(Settings.working_path, 'db')
            if os.path.exists(path):
                shutil.rmtree(path)
    finally:
        # Always cleanup the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            overwrite_and_remove(os.path.join(tmpdir, f))

        shutil.rmtree(tmpdir)
def postconditions_30(self):
    """Verify the v30 migration produced exactly one 'logo' and one 'css' file row."""
    session = get_session(make_db_uri(self.final_db_file))

    for file_id in (u'logo', u'css'):
        matches = session.query(models.File).filter(models.File.id == file_id).count()
        self.assertTrue(matches == 1)

    session.close()
def perform_migration(version):
    """
    Migrate the database from the given schema version up to DATABASE_VERSION,
    one version at a time, inside a temporary staging directory.

    @param version: the schema version of the existing database
    @return: None
    """
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        log.info("Migrations from DB version lower than %d are no longer supported!" % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    tmpdir = os.path.abspath(os.path.join(Settings.tmp_path, 'tmp'))

    # Databases older than version 41 used the db/glbackend-N.db layout;
    # newer ones are a single globaleaks.db in the working path.
    if version < 41:
        orig_db_file = os.path.abspath(os.path.join(Settings.working_path, 'db', 'glbackend-%d.db' % version))
    else:
        orig_db_file = os.path.abspath(os.path.join(Settings.working_path, 'globaleaks.db'))

    final_db_file = os.path.abspath(os.path.join(Settings.working_path, 'globaleaks.db'))

    # Recreate an empty staging directory and copy the original DB into it;
    # the original file is never migrated in place.
    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy(orig_db_file, os.path.join(tmpdir, 'old.db'))

    new_db_file = None

    try:
        while version < DATABASE_VERSION:
            # Ping-pong between old.db and new.db: each step reads old.db
            # and writes new.db, which becomes old.db for the next step.
            old_db_file = os.path.abspath(os.path.join(tmpdir, 'old.db'))
            new_db_file = os.path.abspath(os.path.join(tmpdir, 'new.db'))

            if os.path.exists(new_db_file):
                shutil.move(new_db_file, old_db_file)

            Settings.db_file = new_db_file
            Settings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            log.info("Updating DB from version %d to version %d" % (version, version + 1))

            j = version - FIRST_DATABASE_VERSION_SUPPORTED

            session_old = get_session(make_db_uri(old_db_file))

            engine = get_engine(make_db_uri(new_db_file), foreign_keys=False)

            # The final step targets the current model Base; intermediate
            # steps use the historical schema snapshot for that version.
            if FIRST_DATABASE_VERSION_SUPPORTED + j + 1 == DATABASE_VERSION:
                Base.metadata.create_all(engine)
            else:
                Bases[j+1].metadata.create_all(engine)

            session_new = sessionmaker(bind=engine)()

            # Here is instanced the migration script
            MigrationModule = importlib.import_module("globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(migration_mapping, version, session_old, session_new)

            log.info("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    log.err("Failure while executing migration prologue: %s" % exception)
                    raise exception

                for model_name, _ in migration_mapping.items():
                    # Only migrate tables present in both source and target schema.
                    if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            log.err("Failure while migrating table %s: %s " % (model_name, exception))
                            raise exception

                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    log.err("Failure while executing migration epilogue: %s " % exception)
                    raise exception
            finally:
                # the database should be always closed before leaving the application
                # in order to not keep leaking journal files.
                migration_script.close()

            log.info("Migration stats:")

            # we open a new db in order to verify integrity of the generated file
            session_verify = get_session(make_db_uri(new_db_file))

            for model_name, _ in migration_mapping.items():
                if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                    count = session_verify.query(migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" %
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            log.info(" * %s table migrated (entries count changed from %d to %d)" %
                                     (model_name, migration_script.entries_count[model_name], count))
                    else:
                        log.info(" * %s table migrated (%d entry(s))" %
                                 (model_name, migration_script.entries_count[model_name]))

            version += 1

            session_verify.close()

        perform_data_update(new_db_file)

        # in case of success first copy the new migrated db, then as last action delete the original db file
        shutil.copy(new_db_file, final_db_file)

        if orig_db_file != final_db_file:
            overwrite_and_remove(orig_db_file)

            # Remove the legacy db/ directory once migrated to the new layout.
            path = os.path.join(Settings.working_path, 'db')
            if os.path.exists(path):
                shutil.rmtree(path)
    except Exception as e:
        # BUGFIX: the previous code only print()ed the exception and fell
        # through, so a failed migration looked successful to the caller
        # while the database was left unmigrated. Surface the message to
        # the operator and re-raise so the failure propagates (consistent
        # with the sibling implementation of this function).
        print(e)
        raise
    finally:
        # Always cleanup the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            overwrite_and_remove(os.path.join(tmpdir, f))

        shutil.rmtree(tmpdir)