def db_refresh_memory_variables(store):
    """
    Reload the high-traffic node and notification settings from the
    database into the in-memory copy held on GLSettings.
    """
    node_config = ObjectDict(NodeFactory(store).admin_export())
    GLSettings.memory_copy = node_config

    # Collapse the per-role tor2web flags into a single lookup table.
    GLSettings.memory_copy.accept_tor2web_access = {
        role: node_config['tor2web_' + role]
        for role in ('admin', 'custodian', 'whistleblower', 'receiver')
    }

    GLSettings.memory_copy.languages_enabled = models.l10n.EnabledLanguage.list(store)

    GLSettings.memory_copy.notif = ObjectDict(NotificationFactory(store).admin_export())
    if GLSettings.developer_name:
        GLSettings.memory_copy.notif.source_name = GLSettings.developer_name

    db_refresh_exception_delivery_list(store)

    GLSettings.memory_copy.private = ObjectDict(PrivateFactory(store).mem_copy_export())
def db_refresh_memory_variables(store):
    """
    Reload the high-traffic node and notification settings from the
    database into the in-memory copy held on GLSettings, and refresh the
    admin API token session when a token digest is configured.
    """
    node_config = ObjectDict(models.config.NodeFactory(store).admin_export())
    GLSettings.memory_copy = node_config

    # Collapse the per-role tor2web flags into a single lookup table.
    GLSettings.memory_copy.accept_tor2web_access = {
        role: node_config['tor2web_' + role]
        for role in ('admin', 'custodian', 'whistleblower', 'receiver')
    }

    GLSettings.memory_copy.languages_enabled = models.l10n.EnabledLanguage.list(store)

    GLSettings.memory_copy.notif = ObjectDict(models.config.NotificationFactory(store).admin_export())
    if GLSettings.developer_name:
        GLSettings.memory_copy.notif.source_name = GLSettings.developer_name

    db_refresh_exception_delivery_list(store)

    GLSettings.memory_copy.private = ObjectDict(models.config.PrivateFactory(store).mem_copy_export())

    # When an admin API token is configured, bind it to the oldest admin user.
    if GLSettings.memory_copy.private.admin_api_token_digest:
        api_id = store.find(models.User.id, models.User.role == u'admin') \
                      .order_by(models.User.creation_date).first()
        if api_id is not None:
            GLSettings.appstate.api_token_session = GLSession(api_id, 'admin', 'enabled')
def test_object_dict(self):
    """ObjectDict exposes dict items as attributes and vice versa."""
    od = ObjectDict()

    # A missing key surfaces as AttributeError on attribute access.
    with self.assertRaises(AttributeError):
        getattr(od, 'something')

    # Item assignment is readable both as item and as attribute.
    od['foo'] = 'bar'
    self.assertEqual(od['foo'], 'bar')
    self.assertEqual(od.foo, 'bar')

    # Attribute assignment is readable as an item as well.
    od.key = 'value'
    self.assertEqual(od['key'], 'value')
def test_object_dict(self):
    """ObjectDict exposes dict items as attributes and vice versa."""
    od = ObjectDict()
    self.assertRaises(AttributeError, getattr, od, 'something')
    od['foo'] = 'bar'
    self.assertEqual(od['foo'], 'bar')
    self.assertEqual(od.foo, 'bar')
    od.key = 'value'
    # BUG FIX: the stored value is the text string 'value'; comparing it
    # against the bytes literal b'value' can never succeed on Python 3,
    # so this assertion always failed. Compare str to str.
    self.assertEqual(od['key'], 'value')
def test_object_dict(self):
    """ObjectDict exposes dict items as attributes and vice versa."""
    od = ObjectDict()

    # In this variant a missing key reads back as None via attribute access.
    self.assertEqual(od.foo, None)

    # Item assignment is readable both as item and as attribute.
    od['foo'] = 'bar'
    self.assertEqual(od['foo'], 'bar')
    self.assertEqual(od.foo, 'bar')

    # Attribute assignment is readable as an item as well.
    od.key = 'value'
    self.assertEqual(od['key'], 'value')
def db_refresh_memory_variables(store):
    """
    Rebuild the in-memory cache of the high-traffic node and notification
    settings, then atomically swap it into State.tenant_cache[1].
    """
    fresh = ObjectDict(models.config.NodeFactory(store).admin_export())

    # Collapse the per-role tor2web flags into a single lookup table.
    fresh.accept_tor2web_access = {
        role: fresh['tor2web_' + role]
        for role in ('admin', 'custodian', 'whistleblower', 'receiver')
    }

    fresh.languages_enabled = models.l10n.EnabledLanguage.list(store)

    fresh.notif = ObjectDict(models.config.NotificationFactory(store).admin_export())
    fresh.notif.exception_delivery_list = db_get_exception_delivery_list(store)
    if Settings.developer_name:
        fresh.notif.source_name = Settings.developer_name

    fresh.private = ObjectDict(models.config.PrivateFactory(store).mem_copy_export())

    # When an admin API token is configured, bind it to the oldest admin user.
    if fresh.private.admin_api_token_digest:
        api_id = store.find(models.User.id, models.User.role == u'admin') \
                      .order_by(models.User.creation_date).first()
        if api_id is not None:
            State.api_token_session = Session(api_id, 'admin', 'enabled')

    # The hot swap should be done with the minimal race condition possible:
    # replace the whole cache object in a single assignment.
    State.tenant_cache[1] = fresh
def db_refresh_tenant_cache(session, tid_list):
    """
    This routine loads in memory few variables of node and notification
    tables that are subject to high usage.

    :param session: an ORM session
    :param tid_list: the tenant ids whose caches must be rebuilt
    """
    # Load every config row of the requested tenants into the per-tenant cache,
    # routing notification variables into a nested 'notification' ObjectDict.
    for cfg in session.query(Config).filter(Config.tid.in_(tid_list)):
        tenant_cache = State.tenant_cache[cfg.tid]

        if cfg.var_name in ConfigFilters['node']:
            tenant_cache[cfg.var_name] = cfg.value
        elif cfg.var_name in ConfigFilters['notification']:
            tenant_cache.setdefault('notification', ObjectDict())
            tenant_cache['notification'][cfg.var_name] = cfg.value

    for tid, lang in models.EnabledLanguage.tid_list(session, tid_list):
        State.tenant_cache[tid].setdefault('languages_enabled', []).append(lang)

    for tid in tid_list:
        State.tenant_cache[tid]['ip_filter'] = {}
        State.tenant_cache[tid]['https_allowed'] = {}
        State.tenant_cache[tid]['redirects'] = {}

        for x in [
            ('admin', 'ip_filter_admin_enable', 'ip_filter_admin'),
            ('custodian', 'ip_filter_custodian_enable', 'ip_filter_custodian'),
            ('receiver', 'ip_filter_receiver_enable', 'ip_filter_receiver'),
            ('whistleblower', 'ip_filter_whistleblower_enable', 'ip_filter_whistleblower')
        ]:
            # NOTE(review): the filter value is read from the root tenant
            # (State.tenant_cache[1]) while the enable flag is per-tenant;
            # presumably the IP lists are root-managed — confirm upstream.
            if State.tenant_cache[tid].get(x[1], False) and State.tenant_cache[1][x[2]]:
                State.tenant_cache[tid]['ip_filter'][x[0]] = State.tenant_cache[1][x[2]]

        for x in ['admin', 'custodian', 'receiver', 'whistleblower']:
            State.tenant_cache[tid]['https_allowed'][x] = State.tenant_cache[tid].get('https_' + x, True)

        if State.tenant_cache[tid].mode == 'whistleblowing.it':
            State.tenant_cache[tid]['https_preload'] = State.tenant_cache[1]['https_preload']
            State.tenant_cache[tid]['frame_ancestors'] = State.tenant_cache[1]['frame_ancestors']

    # BUG FIX: the redirects query previously ran inside the per-tid loop and
    # stored every redirect of every tenant under the loop's tid, so each
    # tenant ended up with all tenants' redirects (and the query executed
    # once per tenant). Run it once and key each redirect by its own tid.
    for redirect in session.query(models.Redirect).filter(models.Redirect.tid.in_(tid_list)):
        State.tenant_cache[redirect.tid]['redirects'][redirect.path1] = redirect.path2
def db_refresh_tenant_cache(session, tid_list):
    """
    Reload into memory the frequently accessed node and notification
    configuration of the given tenants.
    """
    # Route each config row into its tenant's cache; notification variables
    # land in a nested 'notification' ObjectDict created on first use.
    for cfg in session.query(Config).filter(Config.tid.in_(tid_list)):
        cache = State.tenant_cache[cfg.tid]
        value = cfg.get_v()

        if cfg.var_name in ConfigFilters['node']:
            cache[cfg.var_name] = value
        elif cfg.var_name in ConfigFilters['notification']:
            cache.setdefault('notification', ObjectDict())[cfg.var_name] = value

    # Collect the enabled languages of every refreshed tenant.
    for tid, lang in models.EnabledLanguage.tid_list(session, tid_list):
        State.tenant_cache[tid].setdefault('languages_enabled', []).append(lang)
def __init__(self):
    # Single-threaded pool dedicated to ORM operations.
    self.orm_tp = ThreadPool(1, 1)

    # Process supervision and network helpers.
    self.process_supervisor = None
    self.tor_exit_set = TorExitSet()
    self.https_socks = []
    self.http_socks = []

    # Scheduled jobs and long-running services.
    self.jobs = []
    self.jobs_monitor = None
    self.services = []

    # Admin API token session state.
    self.api_token_session = None
    self.api_token_session_suspended = False

    # Anomaly / exception / mail bookkeeping.
    self.RecentEventQ = []
    self.RecentAnomaliesQ = {}
    self.exceptions = {}
    self.exceptions_email_count = 0
    self.mail_counters = {}

    self.stats_collection_start_time = datetime_now()
    self.accept_submissions = True

    # Seed the root tenant cache with conservative defaults until the first
    # database refresh replaces them with the real configuration.
    self.tenant_cache = {
        1: ObjectDict({
            'maximum_namesize': 128,
            'maximum_textsize': 4096,
            'maximum_filesize': 30,
            'allow_iframes_inclusion': False,
            'accept_tor2web_access': {
                'admin': True,
                'whistleblower': False,
                'custodian': False,
                'receiver': False
            },
            'private': {
                'https_enabled': False,
            },
            'anonymize_outgoing_connections': True,
        })
    }
def init_state():
    """
    Reset the global Settings/State singletons to a clean devel/test
    configuration rooted at ./working_path.
    """
    Settings.testing = True
    Settings.set_devel_mode()
    Settings.logging = None
    Settings.failed_login_attempts = 0
    Settings.working_path = './working_path'
    Settings.eval_paths()

    # Start from an empty working directory on every run.
    if os.path.exists(Settings.working_path):
        # FIX: distutils.dir_util.remove_tree is deprecated (distutils was
        # removed in Python 3.12); shutil.rmtree performs the same
        # non-verbose recursive removal.
        import shutil
        shutil.rmtree(Settings.working_path)

    orm.set_thread_pool(FakeThreadPool())

    State.settings.enable_api_cache = False
    State.tenant_cache[1] = ObjectDict()
    State.tenant_cache[1].hostname = 'www.globaleaks.org'

    State.init_environment()

    Sessions.clear()
def init_state():
    """
    Reset the global Settings/State singletons to a clean devel/test
    configuration rooted at ./working_path.
    """
    Settings.set_devel_mode()
    Settings.disable_notifications = True
    Settings.failed_login_attempts.clear()
    Settings.working_path = os.path.abspath('./working_path')
    Settings.eval_paths()

    # Start from an empty working directory on every run.
    if os.path.exists(Settings.working_path):
        shutil.rmtree(Settings.working_path)

    orm.set_thread_pool(FakeThreadPool())

    State.settings.enable_api_cache = False

    # Prepare the root tenant cache, then install it in one assignment.
    root_cache = ObjectDict()
    root_cache.hostname = 'www.globaleaks.org'
    root_cache.encryption = True
    State.tenant_cache[1] = root_cache

    State.init_environment()

    Sessions.clear()
def db_refresh_memory_variables(session, to_refresh=None):
    """
    Synchronize the in-memory tenant state/caches with the database.

    :param session: an ORM session
    :param to_refresh: optional list of tenant ids to refresh; when None
                       every active tenant is refreshed.
    """
    # Map of currently active tenants, keyed by tenant id.
    tenant_map = {
        tenant.id: tenant
        for tenant in session.query(models.Tenant).filter(models.Tenant.active == True)
    }

    existing_tids = set(tenant_map.keys())
    cached_tids = set(State.tenant_state.keys())

    # Tenants present in the cache but no longer active: drop them.
    to_remove = cached_tids - existing_tids
    # Active tenants not yet cached: create fresh state/cache entries.
    to_add = existing_tids - cached_tids

    for tid in to_remove:
        if tid in State.tenant_state:
            del State.tenant_state[tid]

        if tid in State.tenant_cache:
            del State.tenant_cache[tid]

    for tid in to_add:
        State.tenant_state[tid] = TenantState(State)
        State.tenant_cache[tid] = ObjectDict()

    # Normalize to_refresh: default to all active tenants, otherwise keep
    # only the requested ids that are actually active.
    if to_refresh is None:
        to_refresh = tenant_map.keys()
    else:
        to_refresh = [tid for tid in to_refresh if tid in tenant_map]

    if to_refresh:
        db_refresh_tenant_cache(session, to_refresh)

    # Refreshing the root tenant affects the derived hostnames of every
    # tenant, so widen the refresh to all cached tenants.
    if 1 in to_refresh:
        to_refresh = State.tenant_cache.keys()
        db_set_cache_exception_delivery_list(session, State.tenant_cache[1])

        if State.tenant_cache[1].admin_api_token_digest:
            State.api_token_session = Session(1, 0, 'admin', False, '')

        log.setloglevel(State.tenant_cache[1].log_level)

    # Root-tenant domains from which per-tenant names are derived.
    rootdomain = State.tenant_cache[1].rootdomain
    root_onionservice = State.tenant_cache[1].onionservice

    # Rebuild the hostname/onionname lists and the hostname->tid map.
    for tid in to_refresh:
        if tid not in tenant_map:
            continue

        tenant = tenant_map[tid]

        if not tenant.active and tid != 1:
            continue

        hostnames = []
        onionnames = []

        if State.tenant_cache[tid].hostname != '':
            hostnames.append(State.tenant_cache[tid].hostname.encode())

        if State.tenant_cache[tid].onionservice != '':
            onionnames.append(State.tenant_cache[tid].onionservice.encode())

        # Implicit per-tenant names under the root domains: p<tid>.<root>.
        if rootdomain != '':
            hostnames.append('p{}.{}'.format(tid, rootdomain).encode())

        if root_onionservice != '':
            onionnames.append('p{}.{}'.format(tid, root_onionservice).encode())

        # Subdomain-based names under the root domains.
        # NOTE(review): the subdomain+rootdomain name is appended to
        # onionnames rather than hostnames — looks inconsistent with the
        # later versions of this routine; confirm intended behavior.
        if tenant.subdomain != '':
            if rootdomain != '':
                onionnames.append('{}.{}'.format(tenant.subdomain, rootdomain).encode())
            if root_onionservice != '':
                onionnames.append('{}.{}'.format(tenant.subdomain, root_onionservice).encode())

        State.tenant_cache[tid].hostnames = hostnames
        State.tenant_cache[tid].onionnames = onionnames

        State.tenant_hostname_id_map.update({h: tid for h in hostnames + onionnames})
def __init__(self):
    """Initialize the global GlobaLeaks settings with their defaults."""
    # command line parsing utils
    self.parser = OptionParser()
    self.cmdline_options = None

    # version
    self.version_string = __version__

    # testing
    # This variable is to be able to hook/bypass code when unit-tests are run
    self.testing = False

    # daemonize the process
    self.nodaemon = False

    # thread pool size of 1
    self.orm_tp = ThreadPool(1, 1)

    self.bind_address = '0.0.0.0'
    self.bind_remote_ports = [80, 443]
    self.bind_local_ports = [8082, 8083]

    # store name
    self.store_name = 'main_store'
    self.db_type = 'sqlite'

    # debug defaults
    self.orm_debug = False

    # files and paths
    self.root_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
    self.pid_path = '/var/run/globaleaks'
    self.working_path = '/var/globaleaks'

    # TODO(bug-fix-italian-style) why is this set to the 2nd entry in the possible
    # client paths...? please fix.
    self.client_path = '/usr/share/globaleaks/client'
    for path in possible_client_paths:
        if os.path.exists(path):
            self.client_path = path
            break

    self.set_ramdisk_path()

    self.authentication_lifetime = 3600

    self.jobs = []
    self.jobs_monitor = None
    self.services = []

    self.RecentEventQ = []
    self.RecentAnomaliesQ = {}
    self.stats_collection_start_time = datetime_now()

    self.accept_submissions = True

    # statistical, referred to latest period
    # and resetted by session_management sched
    self.failed_login_attempts = 0

    # static file rules
    self.staticfile_regexp = r'(.*)'
    self.staticfile_overwrite = False

    self.local_hosts = ['127.0.0.1', 'localhost']

    self.onionservice = None

    self.receipt_regexp = u'[0-9]{16}'

    # A lot of operations performed massively by globaleaks
    # should avoid to fetch continuously variables from the DB so that
    # it is important to keep this variables in memory
    #
    # Initialization is handled by db_refresh_memory_variables
    self.memory_copy = ObjectDict({
        'maximum_namesize': 128,
        'maximum_textsize': 4096,
        'maximum_filesize': 30,
        'allow_iframes_inclusion': False,
        'accept_tor2web_access': {
            'admin': True,
            'whistleblower': False,
            'custodian': False,
            'receiver': False
        },
        'private': {
            'https_enabled': False,
        },
        'anonymize_outgoing_connections': True,
    })

    # Default request time uniform value
    self.side_channels_guard = 150

    # SOCKS default
    self.socks_host = "127.0.0.1"
    self.socks_port = 9050

    self.key_bits = 2048
    self.csr_sign_bits = 512

    self.api_token_len = 32

    self.notification_limit = 30
    self.jobs_operation_limit = 20

    self.user = getpass.getuser()
    self.group = getpass.getuser()
    self.uid = os.getuid()
    self.gid = os.getgid()
    self.devel_mode = False
    self.developer_name = ''
    self.disable_swap = False

    # Number of failed login enough to generate an alarm
    self.failed_login_alarm = 5

    # Number of minutes in which a user is prevented to login in case of triggered alarm
    self.failed_login_block_time = 5

    # Limit for log sizes and number of log files
    # https://github.com/globaleaks/GlobaLeaks/issues/1578
    self.log_size = 10000000  # 10MB
    self.log_file_size = 1000000  # 1MB
    # FIX: use floor division so the file count stays an int under
    # Python 3 (true division would yield the float 10.0).
    self.num_log_files = self.log_size // self.log_file_size

    # size used while streaming files
    self.file_chunk_size = 65535  # 64kb

    self.AES_key_size = 32
    self.AES_key_id_regexp = u'[A-Za-z0-9]{16}'
    # FIX: floor division keeps the nonce byte-length an int on Python 3.
    self.AES_counter_nonce = 128 // 8
    self.AES_file_regexp = r'(.*)\.aes'
    self.AES_file_regexp_comp = re.compile(self.AES_file_regexp)
    self.AES_keyfile_prefix = "aeskey-"

    self.exceptions = {}
    self.exceptions_email_count = 0
    self.exceptions_email_hourly_limit = 20

    self.disable_backend_exception_notification = False
    self.disable_client_exception_notification = False

    self.enable_input_length_checks = True

    self.submission_minimum_delay = 3  # seconds
    self.submission_maximum_ttl = 3600  # 1 hour

    self.mail_counters = {}
    self.mail_timeout = 15  # seconds
    self.mail_attempts_limit = 3  # per mail limit

    self.https_socks = []
    self.http_socks = []

    # TODO holds global state until GLSettings is inverted and this
    # state managed as an object by the application
    self.appstate = ObjectDict()
    self.appstate.process_supervisor = None
    self.appstate.tor_exit_set = TorExitSet()
    self.appstate.latest_version = StrictVersion(__version__)
    self.appstate.api_token_session = None
    self.appstate.api_token_session_suspended = False

    self.acme_directory_url = 'https://acme-v01.api.letsencrypt.org/directory'
def db_refresh_memory_variables(session, to_refresh=None):
    """
    Synchronize the in-memory tenant state/caches with the database.

    :param session: an ORM session
    :param to_refresh: optional list of tenant ids to refresh; when None
                       every active tenant is refreshed.
    """
    session.flush()

    # Map of currently active tenants, keyed by tenant id.
    tenant_map = {
        tenant.id: tenant
        for tenant in session.query(models.Tenant).filter(models.Tenant.active == True)
    }

    existing_tids = set(tenant_map.keys())
    cached_tids = set(State.tenant_state.keys())

    # Tenants present in the cache but no longer active: drop them.
    to_remove = cached_tids - existing_tids
    # Active tenants not yet cached: create fresh state/cache entries.
    to_add = existing_tids - cached_tids

    for tid in to_remove:
        if tid in State.tenant_state:
            del State.tenant_state[tid]

        if tid in State.tenant_cache:
            del State.tenant_cache[tid]

    for tid in to_add:
        State.tenant_state[tid] = TenantState(State)
        State.tenant_cache[tid] = ObjectDict()

    # Normalize to_refresh: default to all active tenants, otherwise keep
    # only the requested ids that are actually active.
    if to_refresh is None:
        to_refresh = tenant_map.keys()
    else:
        to_refresh = [tid for tid in to_refresh if tid in tenant_map]

    if to_refresh:
        db_refresh_tenant_cache(session, to_refresh)

    # Refreshing the root tenant affects the derived hostnames of every
    # tenant, so widen the refresh to all cached tenants.
    if 1 in to_refresh:
        to_refresh = State.tenant_cache.keys()
        db_set_cache_exception_delivery_list(session, State.tenant_cache[1])

        # When an admin API token is configured, bind its session to the
        # oldest admin user of the root tenant.
        if State.tenant_cache[1].admin_api_token_digest:
            # NOTE(review): query(models.User.id).first() returns a Row,
            # not a bare id — presumably Session unpacks it; confirm.
            api_id = session.query(models.User.id) \
                            .filter(models.User.tid == 1, models.User.role == u'admin') \
                            .order_by(models.User.creation_date).first()
            if api_id is not None:
                State.api_token_session = Session(1, api_id, 'admin', 'enabled')

    # Root-tenant domains from which per-tenant names are derived.
    rootdomain = State.tenant_cache[1].rootdomain
    root_onionservice = State.tenant_cache[1].onionservice

    # Rebuild the hostname/onionname lists and the hostname->tid map.
    for tid in to_refresh:
        if tid not in tenant_map:
            continue

        tenant = tenant_map[tid]
        hostnames = []
        onionnames = []

        if not tenant.active and tid != 1:
            continue

        # Implicit per-tenant names under the root domains: p<tid>.<root>.
        if rootdomain != '':
            hostnames.append('p{}.{}'.format(tid, rootdomain))

        if root_onionservice != '':
            onionnames.append('p{}.{}'.format(tid, root_onionservice))

        # Subdomain-based names under the root domains.
        # NOTE(review): the subdomain+rootdomain name is appended to
        # onionnames rather than hostnames — verify this is intended.
        if tenant.subdomain != '':
            if rootdomain != '':
                onionnames.append('{}.{}'.format(tenant.subdomain, rootdomain))
            if root_onionservice != '':
                onionnames.append('{}.{}'.format(tenant.subdomain, root_onionservice))

        # Explicitly configured per-tenant hostname/onionservice.
        if State.tenant_cache[tid].hostname != '':
            hostnames.append(State.tenant_cache[tid].hostname)

        if State.tenant_cache[tid].onionservice != '':
            onionnames.append(State.tenant_cache[tid].onionservice)

        State.tenant_cache[tid].hostnames = hostnames
        State.tenant_cache[tid].onionnames = onionnames

        State.tenant_hostname_id_map.update({h: tid for h in hostnames + onionnames})
def db_refresh_memory_variables(session, to_refresh=None):
    """
    Synchronize the in-memory tenant state/caches with the database.

    :param session: an ORM session
    :param to_refresh: optional list of tenant ids to refresh; when None
                       (or when it includes the root tenant) every active
                       tenant is refreshed.
    """
    active_tids = set([
        tid[0]
        for tid in session.query(models.Tenant.id).filter(models.Tenant.active.is_(True))
    ])

    cached_tids = set(State.tenant_state.keys())

    # Remove tenants that have been disabled
    for tid in cached_tids - active_tids:
        if tid in State.tenant_state:
            del State.tenant_state[tid]

        if tid in State.tenant_cache:
            del State.tenant_cache[tid]

    # Add tenants that have been enabled
    for tid in active_tids - cached_tids:
        State.tenant_state[tid] = TenantState(State)
        State.tenant_cache[tid] = ObjectDict()

    # Refreshing the root tenant affects the derived names of every tenant,
    # so in that case widen the refresh to all active tenants.
    if to_refresh is None or 1 in to_refresh:
        to_refresh = active_tids
    else:
        to_refresh = [tid for tid in to_refresh if tid in active_tids]

    if not len(to_refresh):
        return

    db_refresh_tenant_cache(session, to_refresh)

    root_tenant = State.tenant_cache[1]

    # Rebuild the hostname/onionname lists and the hostname->tid map.
    for tid in to_refresh:
        tenant = State.tenant_cache[tid]
        tenant.hostnames = []
        tenant.onionnames = []

        # Root-tenant-only side effects of a refresh.
        if tid == 1:
            log.setloglevel(tenant.log_level)
            db_set_cache_exception_delivery_list(session, tenant)

        if tenant.hostname:
            tenant.hostnames.append(tenant.hostname.encode())

        if tenant.onionservice:
            tenant.onionnames.append(tenant.onionservice.encode())

        # Keep serving the previous onion address during a rotation.
        if tenant.old_onionservice:
            tenant.onionnames.append(tenant.old_onionservice.encode())

        # Tenants without their own onion service inherit a subdomain of
        # the root tenant's onion service (cache-only, not persisted here).
        if not tenant.onionservice and root_tenant.onionservice:
            tenant.onionservice = tenant.subdomain + '.' + root_tenant.onionservice

        if tenant.subdomain:
            if root_tenant.rootdomain:
                tenant.hostnames.append('{}.{}'.format(tenant.subdomain, root_tenant.rootdomain).encode())

            if root_tenant.onionservice:
                tenant.onionnames.append('{}.{}'.format(tenant.subdomain, root_tenant.onionservice).encode())

        State.tenant_hostname_id_map.update({h: tid for h in tenant.hostnames + tenant.onionnames})