def migrate_Context(self):
    """Copy every Context row into the new schema, clamping tip_timetolive
    into the supported range and forcing enable_rc_to_wb_files off."""
    for old_obj in self.session_old.query(self.model_from['Context']):
        new_obj = self.model_to['Context']()

        column_keys = [c.key for c in new_obj.__table__.columns]
        for key in column_keys:
            if key == 'tip_timetolive':
                tip_ttl = 5 * 365
                if old_obj.tip_timetolive > tip_ttl:
                    Settings.print_msg('[WARNING] Found an expiration date longer than 5 years! Configuring tips to never expire.')
                    # If data retention was larger than 5 years the intended goal was
                    # probably to keep the submission around forever.
                    new_obj.tip_timetolive = -1
                elif old_obj.tip_timetolive < -1:
                    Settings.print_msg('[WARNING] Found a negative tip expiration! Configuring tips to never expire.')
                    new_obj.tip_timetolive = -1
                else:
                    new_obj.tip_timetolive = old_obj.tip_timetolive
            elif key == 'enable_rc_to_wb_files':
                # New feature: disabled by default on migrated contexts
                new_obj.enable_rc_to_wb_files = False
            else:
                setattr(new_obj, key, getattr(old_obj, key))

        self.session_new.add(new_obj)
def _test(self, path, version):
    """Copy the fixture DB for `version` into a scratch directory, run the
    full migration via update_db(), and execute any version-specific
    precondition/postcondition hooks defined on the test class."""
    db_name = 'glbackend-%d.db' % version

    helpers.init_glsettings_for_unit_tests()

    Settings.db_path = os.path.join(Settings.ramdisk_path, 'db_test')
    self.start_db_file = os.path.abspath(os.path.join(Settings.db_path, db_name))
    self.final_db_file = os.path.abspath(os.path.join(Settings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))
    self.start_db_uri = Settings.make_db_uri(self.start_db_file)
    Settings.db_uri = Settings.make_db_uri(self.final_db_file)

    # Start each run from a pristine copy of the fixture database
    shutil.rmtree(Settings.db_path, True)
    os.mkdir(Settings.db_path)
    shutil.copyfile(os.path.join(path, db_name), os.path.join(Settings.db_path, db_name))

    # TESTS PRECONDITIONS
    preconditions = getattr(self, 'preconditions_%d' % version, None)
    if preconditions is not None:
        preconditions()

    ret = update_db()

    # TESTS POSTCONDITIONS
    postconditions = getattr(self, 'postconditions_%d' % version, None)
    if postconditions is not None:
        postconditions()

    shutil.rmtree(Settings.db_path)

    self.assertNotEqual(ret, -1)
def migrate_Context(self):
    """Copy every Context row into the new schema, clamping tip_timetolive
    into the supported range and forcing enable_rc_to_wb_files off."""
    for old_obj in self.store_old.find(self.model_from['Context']):
        new_obj = self.model_to['Context']()

        for _, column in new_obj._storm_columns.items():
            name = column.name

            if name == 'tip_timetolive':
                # NOTE hardcoded policy. . . .
                tip_ttl = 5 * 365
                if old_obj.tip_timetolive > tip_ttl:
                    Settings.print_msg('[WARNING] Found an expiration date longer than 5 years! Configuring tips to never expire.')
                    # If data retention was larger than 5 years the intended goal was
                    # probably to keep the submission around forever.
                    new_obj.tip_timetolive = -1
                elif old_obj.tip_timetolive < -1:
                    Settings.print_msg('[WARNING] Found a negative tip expiration! Configuring tips to never expire.')
                    new_obj.tip_timetolive = -1
                else:
                    new_obj.tip_timetolive = old_obj.tip_timetolive
            elif name == 'enable_rc_to_wb_files':
                # New feature: disabled by default on migrated contexts
                new_obj.enable_rc_to_wb_files = False
            else:
                setattr(new_obj, name, getattr(old_obj, name))

        self.store_new.add(new_obj)
def _deferred_start(self):
    """Complete startup once the reactor is running: update/initialize the
    database, bind the listening sockets, launch the HTTPS worker
    supervisor and start the periodic jobs."""
    ret = update_db()

    if ret == -1:
        # The DB update failed. reactor.stop() only takes effect after this
        # callback returns, so bail out explicitly: without this `return`
        # the whole startup sequence below would still run against a
        # broken/missing database.
        reactor.stop()
        return

    if ret == 0:
        init_db()

    sync_clean_untracked_files()
    sync_refresh_memory_variables()

    self.state.orm_tp.start()

    reactor.addSystemEventTrigger('before', 'shutdown', self.shutdown)

    for sock in self.state.http_socks:
        listen_tcp_on_sock(reactor, sock.fileno(), self.api_factory)

    self.state.process_supervisor = ProcessSupervisor(self.state.https_socks, '127.0.0.1', 8082)

    self.state.process_supervisor.maybe_launch_https_workers()

    self.start_jobs()

    Settings.print_listening_interfaces()
def execute_query(self, query):
    """Execute a raw SQL statement on the destination store.

    :param query: the SQL statement text, without a trailing semicolon
    :raises OperationalError: re-raised after logging the failing query
    """
    try:
        self.store_new.execute(query + ';')
    except OperationalError as excep:
        Settings.print_msg('OperationalError %s while executing query: %s' % (excep, query))
        # Bare raise re-raises the active exception with its original
        # traceback intact (instead of `raise excep`).
        raise
def migrate_model(self, model_name):
    """Migrate one model, dispatching to a model-specific migrate_<Model>
    handler when the subclass defines one, else to the generic copier."""
    objs_count = self.session_old.query(self.model_from[model_name]).count()

    handler = getattr(self, 'migrate_%s' % model_name, None)
    if handler is None:
        Settings.print_msg(' * %s [#%d]' % (model_name, objs_count))
        self.generic_migration_function(model_name)
    else:
        # The 'ł' marker flags tables with custom migration logic
        Settings.print_msg(' ł %s [#%d]' % (model_name, objs_count))
        handler()
def migrate_model(self, model_name):
    """Migrate one model, dispatching to a model-specific migrate_<Model>
    handler when the subclass defines one, else to the generic copier."""
    objs_count = self.store_old.find(self.model_from[model_name]).count()

    handler = getattr(self, 'migrate_%s' % model_name, None)
    if handler is None:
        Settings.print_msg(' * %s [#%d]' % (model_name, objs_count))
        self.generic_migration_function(model_name)
    else:
        # The 'ł' marker flags tables with custom migration logic
        Settings.print_msg(' ł %s [#%d]' % (model_name, objs_count))
        handler()
def migrate_model(self, model_name):
    """Migrate one model unless it has no entries; prefer a model-specific
    migrate_<Model> handler over the generic copier."""
    count = self.entries_count[model_name]
    if count <= 0:
        # Nothing to migrate for this table
        return

    Settings.print_msg(' * %s [#%d]' % (model_name, count))

    handler = getattr(self, 'migrate_%s' % model_name, None)
    if handler is not None:
        handler()
    else:
        self.generic_migration_function(model_name)
def perform_data_update(dbfile):
    """Open `dbfile` and run the data-update routine on it.

    Refuses to proceed when a currently-enabled language is no longer in
    LANGUAGES_SUPPORTED_CODES. The store is rolled back on any failure and
    always closed — the original version leaked the open store when the
    unsupported-languages exception fired before the try block.

    :param dbfile: path to the sqlite database file to update
    :raises Exception: when an enabled language is no longer supported
    """
    store = Store(create_database(Settings.make_db_uri(dbfile)))

    try:
        enabled_languages = [lang.name for lang in store.find(l10n.EnabledLanguage)]

        removed_languages = list(set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES))

        if removed_languages:
            removed_languages.sort()
            removed_languages = ', '.join(removed_languages)
            raise Exception(
                "FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
                "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop" % removed_languages)

        db_perform_data_update(store)
        store.commit()
    except:
        store.rollback()
        raise
    finally:
        store.close()
def setUp(self):
    """Prepare a scratch copy of the populated current-version DB, stamp a
    dummy version string into it, and stash the config mocks used later."""
    helpers.init_glsettings_for_unit_tests()

    Settings.db_path = os.path.join(Settings.ramdisk_path, 'db_test')
    shutil.rmtree(Settings.db_path, True)
    os.mkdir(Settings.db_path)

    db_name = 'glbackend-%d.db' % DATABASE_VERSION
    fixture_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'db', 'populated', db_name)
    shutil.copyfile(fixture_path, os.path.join(Settings.db_path, db_name))

    self.db_file = os.path.join(Settings.db_path, db_name)
    Settings.db_uri = Settings.make_db_uri(self.db_file)

    # place a dummy version in the current db
    store = Store(create_database(Settings.db_uri))
    priv_fact = config.PrivateFactory(store)
    self.dummy_ver = '2.XX.XX'
    priv_fact.set_val(u'version', self.dummy_ver)
    self.assertEqual(priv_fact.get_val(u'version'), self.dummy_ver)
    store.commit()
    store.close()

    # backup various mocks that we will use
    self._bck_f = config.is_cfg_valid

    GLConfig['private']['xx_smtp_password'] = GLConfig['private'].pop('smtp_password')

    self.dp = u'yes_you_really_should_change_me'
def init_glsettings_for_unit_tests():
    """Reset the global Settings/State singletons to a clean devel-mode
    configuration for unit tests.

    NOTE(review): statement order matters — eval_paths() and the directory
    helpers depend on working_path being assigned first; do not reorder.
    """
    Settings.testing = True
    Settings.set_devel_mode()
    Settings.logging = None
    Settings.failed_login_attempts = 0
    Settings.working_path = './working_path'

    Settings.eval_paths()
    Settings.set_ramdisk_path()

    # Recreate the working directories from scratch for test isolation
    Settings.remove_directories()
    Settings.create_directories()

    # Synchronous fake thread pool so ORM calls run inline during tests
    State.orm_tp = FakeThreadPool()
    State.tenant_cache[1].hostname = 'localhost'

    Sessions.clear()
def init_state():
    """Reset Settings/State to a clean devel-mode test configuration and
    rebuild the environment from scratch.

    NOTE(review): statement order matters — eval_paths() depends on
    working_path being assigned first; do not reorder.
    """
    Settings.testing = True
    Settings.set_devel_mode()
    Settings.logging = None
    Settings.failed_login_attempts = 0
    Settings.working_path = './working_path'

    Settings.eval_paths()

    # Wipe any leftover working tree from a previous run
    if os.path.exists(Settings.working_path):
        dir_util.remove_tree(Settings.working_path, 0)

    # Synchronous fake thread pool so ORM calls run inline during tests
    orm.set_thread_pool(FakeThreadPool())

    State.settings.enable_api_cache = False
    State.tenant_cache[1] = ObjectDict()
    State.tenant_cache[1].hostname = 'www.globaleaks.org'

    State.init_environment()

    Sessions.clear()
def init_state():
    """Reset Settings/State to a clean devel-mode test configuration with
    notifications disabled and tenant-1 encryption enabled.

    NOTE(review): statement order matters — eval_paths() depends on
    working_path being assigned first; do not reorder.
    """
    Settings.set_devel_mode()
    Settings.disable_notifications = True
    # failed_login_attempts is a mutable container here (cleared, not reset to 0)
    Settings.failed_login_attempts.clear()
    Settings.working_path = os.path.abspath('./working_path')

    Settings.eval_paths()

    # Wipe any leftover working tree from a previous run
    if os.path.exists(Settings.working_path):
        shutil.rmtree(Settings.working_path)

    # Synchronous fake thread pool so ORM calls run inline during tests
    orm.set_thread_pool(FakeThreadPool())

    State.settings.enable_api_cache = False
    State.tenant_cache[1] = ObjectDict()
    State.tenant_cache[1].hostname = 'www.globaleaks.org'
    State.tenant_cache[1].encryption = True

    State.init_environment()

    Sessions.clear()
def init_state():
    """Reset Settings/State to a clean devel-mode test configuration with
    tenant-1 encryption enabled.

    NOTE(review): statement order matters — eval_paths() depends on
    working_path being assigned first; do not reorder.
    """
    Settings.testing = True
    Settings.set_devel_mode()
    Settings.logging = None
    Settings.failed_login_attempts = 0
    Settings.working_path = os.path.abspath('./working_path')

    Settings.eval_paths()

    # Wipe any leftover working tree from a previous run
    if os.path.exists(Settings.working_path):
        dir_util.remove_tree(Settings.working_path, 0)

    # Synchronous fake thread pool so ORM calls run inline during tests
    orm.set_thread_pool(FakeThreadPool())

    State.settings.enable_api_cache = False
    State.tenant_cache[1] = ObjectDict()
    State.tenant_cache[1].hostname = 'www.globaleaks.org'
    State.tenant_cache[1].encryption = True

    State.init_environment()

    Sessions.clear()
def postconditions_36(self):
    """After migrating to v36, verify the Tor onion service address and its
    private key survived the migration intact."""
    db_uri = Settings.make_db_uri(os.path.join(Settings.db_path, Settings.db_file_name))
    store = Store(create_database(db_uri))

    onion_addr = config.NodeFactory(store).get_val(u'onionservice')
    onion_key = config.PrivateFactory(store).get_val(u'tor_onion_key')

    self.assertEqual('lftx7dbyvlc5txtl.onion', onion_addr)

    with open(os.path.join(helpers.DATA_DIR, 'tor/ephemeral_service_key')) as f:
        expected_key = f.read().strip()
    self.assertEqual(expected_key, onion_key)

    store.close()
def post(self):
    """Verify the configured hostname serves this GlobaLeaks instance by
    fetching its robots.txt and checking body and Server header."""
    hostname = State.tenant_cache[1].hostname
    if not hostname:
        raise errors.ValidationError('hostname is not set')

    net_agent = Settings.get_agent()

    url = bytes(urlparse.urlunsplit(('http', hostname, 'robots.txt', None, None)))

    try:
        resp = yield net_agent.request('GET', url)
        body = yield readBody(resp)

        server_h = resp.headers.getRawHeaders('Server', [None])[-1].lower()
        if not body.startswith('User-agent: *') or server_h != 'globaleaks':
            raise EnvironmentError('Response unexpected')
    except (EnvironmentError, ConnectError):
        # Any fetch or validation failure maps to the same external error
        raise errors.ExternalResourceError()
def startService(self):
    """Reserve the local and remote listening sockets, harden the process
    (swap, permissions, privileges) and schedule the deferred startup."""
    # In devel mode remote ports are shifted by 8000 (80 -> 8080, 443 -> 8443)
    mask = 8000 if Settings.devel_mode else 0

    # Allocate local ports
    for port in Settings.bind_local_ports:
        http_sock, fail = reserve_port_for_ip('127.0.0.1', port)
        if fail is None:
            self.state.http_socks += [http_sock]
        else:
            log.err("Could not reserve socket for %s (error: %s)", fail[0], fail[1])

    # Allocate remote ports
    for port in Settings.bind_remote_ports:
        sock, fail = reserve_port_for_ip(Settings.bind_address, port + mask)
        if fail is not None:
            log.err("Could not reserve socket for %s (error: %s)", fail[0], fail[1])
            continue

        if port == 80:
            self.state.http_socks += [sock]
        elif port == 443:
            self.state.https_socks += [sock]

    if Settings.disable_swap:
        disable_swap()

    Settings.fix_file_permissions()
    Settings.drop_privileges()
    Settings.check_directories()

    reactor.callLater(0, self.deferred_start)
def operation(self):
    """Refresh the cached set of Tor exit-node addresses."""
    log.debug('Fetching list of Tor exit nodes')

    yield State.tor_exit_set.update(Settings.get_agent())

    log.debug('Retrieved a list of %d exit nodes', len(State.tor_exit_set))
def perform_schema_migration(version):
    """
    Migrate the database schema step by step from `version` up to
    DATABASE_VERSION, working on temporary copies and installing the final
    file only on full success.

    @param version: the schema version of the existing database file
    @return: None; raises on any migration or integrity-check failure
    """
    # NOTE(review): these two lists are appended to below but never read —
    # apparently leftover bookkeeping; cleanup is driven by the tmpdir sweep
    # in the final `finally` block instead.
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        log.info("Migrations from DB version lower than %d are no longer supported!" % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    # All intermediate DB files live in a scratch directory; the original is
    # only ever read, and replaced as the very last step on success.
    tmpdir = os.path.abspath(os.path.join(Settings.db_path, 'tmp'))
    orig_db_file = os.path.abspath(os.path.join(Settings.db_path, 'glbackend-%d.db' % version))
    final_db_file = os.path.abspath(os.path.join(Settings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))

    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy2(orig_db_file, tmpdir)

    new_db_file = None

    try:
        # One loop iteration migrates exactly one schema version forward.
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % version))
            new_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % (version + 1)))

            Settings.db_file = new_db_file
            Settings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            log.info("Updating DB from version %d to version %d" % (version, version + 1))

            store_old = Store(create_database('sqlite:' + old_db_file))
            store_new = Store(create_database('sqlite:' + new_db_file))

            # Here is instanced the migration script
            MigrationModule = importlib.import_module("globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(migration_mapping, version, store_old, store_new)

            log.info("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    log.err("Failure while executing migration prologue: %s" % exception)
                    raise exception

                for model_name, _ in migration_mapping.items():
                    # Skip models absent from either the source or target schema
                    if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            log.err("Failure while migrating table %s: %s " % (model_name, exception))
                            raise exception

                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    log.err("Failure while executing migration epilogue: %s " % exception)
                    raise exception
            finally:
                # the database should be always closed before leaving the application
                # in order to not keep leaking journal files.
                migration_script.close()

            log.info("Migration stats:")

            # we open a new db in order to verify integrity of the generated file
            store_verify = Store(create_database(Settings.make_db_uri(new_db_file)))

            for model_name, _ in migration_mapping.items():
                if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                    # Row-count integrity check: a mismatch is fatal only for
                    # tables flagged fail_on_count_mismatch.
                    count = store_verify.find(migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" % \
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            log.info(" * %s table migrated (entries count changed from %d to %d)" % \
                                     (model_name, migration_script.entries_count[model_name], count))
                    else:
                        log.info(" * %s table migrated (%d entry(s))" % \
                                 (model_name, migration_script.entries_count[model_name]))

            version += 1

            store_verify.close()

        # Schema is now current; apply the data-level updates on the result.
        perform_data_update(new_db_file)
    except Exception:
        raise
    else:
        # in case of success first copy the new migrated db, then as last action delete the original db file
        shutil.copy(new_db_file, final_db_file)
        security.overwrite_and_remove(orig_db_file)
    finally:
        # Always cleanup the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            security.overwrite_and_remove(os.path.join(tmpdir, f))
        shutil.rmtree(tmpdir)
def fetch_packages_file(self):
    """Download the Debian Packages index from the update server."""
    agent = Settings.get_agent()
    return get_page(agent, DEB_PACKAGE_URL)