def evaluate_update_notification(session, state, latest_version):
    """
    Persist a newly advertised software version and mail the admins.

    :param session: an ORM session used for config/user access
    :param state: the application state object used to send mail
    :param latest_version: the version string advertised upstream
    """
    config = ConfigFactory(session, 1)
    known_latest = config.get_val('latest_version')

    # Nothing to do unless the advertised version is newer than the stored one
    if parse_version(known_latest) >= parse_version(latest_version):
        return

    Cache.invalidate()

    config.set_val('latest_version', latest_version)

    # Notify only when the node runs the previously known latest version;
    # this limits the number of repeated update e-mails
    if parse_version(__version__) != parse_version(known_latest):
        return

    for admin in db_get_users(session, 1, 'admin'):
        # Skip admins that disabled e-mail notifications
        if not admin['notification']:
            continue

        language = admin['language']

        data = {
            'type': 'software_update_available',
            'latest_version': latest_version,
            'node': db_admin_serialize_node(session, 1, language),
            'notification': db_get_notification(session, 1, language),
            'user': admin,
        }

        state.format_and_send_mail(session, 1, admin, data)
def check_disk_anomalies(self):
    """
    Measure free disk space and raise/clear the disk-space alarm accordingly.

    Here in Alarm is written the threshold to say if we're in disk alarm
    or not. Therefore the function "report" the amount of free space and
    the evaluation + alarm shift is performed here.

    workingdir: is performed a percentage check (at least 1% and an absolute comparison)

    "unusable node" threshold: happen when the space is really shitty.
    https://github.com/globaleaks/GlobaLeaks/issues/297
    https://github.com/globaleaks/GlobaLeaks/issues/872
    """
    self.measured_freespace, self.measured_totalspace = get_disk_space(
        self.state.settings.working_path)

    # Defaults: no alarm, no message, submissions accepted
    disk_space = 0
    disk_message = ""
    accept_submissions = True
    old_accept_submissions = State.accept_submissions

    # Take the first matching anomaly condition, if any
    for c in get_disk_anomaly_conditions(self.measured_freespace,
                                         self.measured_totalspace):
        if not c['condition']:
            continue

        disk_space = c['alarm_level']

        info_msg = c['info_msg']()

        if disk_space == 2:
            disk_message = "[FATAL] Disk anomaly, submissions disabled: %s" % info_msg
        else:  # == 1
            disk_message = "[WARNING]: Disk anomaly: %s" % info_msg

        accept_submissions = c['accept_submissions']
        break

    # This check is temporarily, want to be verified that the switch can be
    # logged as part of the Anomalies via this function
    old_alarm_level = self.alarm_levels['disk_space']
    if old_alarm_level != disk_space:
        if disk_message:
            log.err(disk_message)
        else:
            log.err("Available disk space returned to normal levels")

    # the value is set here with a single assignment in order to
    # minimize possible race conditions resetting/settings the values
    self.alarm_levels['disk_space'] = disk_space
    self.alarm_levels['disk_message'] = disk_message

    # if not on testing change accept_submission to the new value
    State.accept_submissions = accept_submissions if not self.state.settings.testing else True

    if old_accept_submissions != State.accept_submissions:
        log.info("Switching disk space availability from: %s to %s",
                 old_accept_submissions, accept_submissions)

        # Must invalidate the cache here because accept_subs served in
        # /public has changed
        Cache.invalidate()
def check_disk_anomalies(self):
    """
    Measure free disk space and shift the disk alarm level accordingly.

    The thresholds live in the anomaly conditions; this method only reports
    the measured free space and applies the first matching condition.

    workingdir: is performed a percentage check (at least 1% and an absolute comparison)

    "unusable node" threshold: happen when the space is really shitty.
    https://github.com/globaleaks/GlobaLeaks/issues/297
    https://github.com/globaleaks/GlobaLeaks/issues/872
    """
    self.measured_freespace, self.measured_totalspace = get_disk_space(self.state.settings.working_path)

    # Defaults when no anomaly condition triggers
    level = 0
    message = ""
    allow = True
    prior_allow = State.accept_submissions

    for entry in get_disk_anomaly_conditions(self.measured_freespace, self.measured_totalspace):
        if not entry['condition']:
            continue

        level = entry['alarm_level']
        detail = entry['info_msg']()

        if level == 2:
            message = "[FATAL] Disk anomaly, submissions disabled: %s" % detail
        else:
            # level == 1
            message = "[WARNING]: Disk anomaly: %s" % detail

        allow = entry['accept_submissions']
        break

    # Log whenever the alarm level changes (either direction)
    if self.alarm_levels['disk_space'] != level:
        log.err(message if message else "Available disk space returned to normal levels")

    # single assignments so concurrent readers never observe a half-updated pair
    self.alarm_levels['disk_space'] = level
    self.alarm_levels['disk_message'] = message

    # while under test, submissions stay enabled regardless of disk state
    State.accept_submissions = True if self.state.settings.testing else allow

    if prior_allow != State.accept_submissions:
        log.info("Switching disk space availability from: %s to %s",
                 prior_allow, allow)

        # accept_submissions is exposed via /public, so the cache must be dropped
        Cache.invalidate()
def wrapper(self, *args, **kwargs):
    """Serve the response from the per-tenant cache, populating it on a miss."""
    entry = Cache.get(self.request.tid, self.request.path, self.request.language)

    if entry is not None:
        # Cache hit: replay the stored content type and body
        self.request.setHeader(b'Content-type', entry[0])
        return entry[1]

    def store(data):
        # Structured results are serialized as JSON before caching
        if isinstance(data, (dict, list)):
            self.request.setHeader(b'content-type', b'application/json')
            data = json.dumps(data, cls=JSONEncoder)

        ctype = self.request.responseHeaders.getRawHeaders(b'Content-type', [b'application/json'])[0]
        return Cache.set(self.request.tid, self.request.path, self.request.language, ctype, data)[1]

    deferred = defer.maybeDeferred(f, self, *args, **kwargs)
    deferred.addCallback(store)
    return deferred
def callback(data):
    """Cache the handler result (gzip-encoded on the wire) and return the cached body."""
    request = self.request

    # Structured results are serialized as JSON before caching
    if isinstance(data, (dict, list)):
        request.setHeader(b'content-type', b'application/json')
        data = json.dumps(data)

    # Cached entries are always served gzip-compressed
    request.setHeader(b'Content-encoding', b'gzip')

    content_type = request.responseHeaders.getRawHeaders(b'Content-type', [b'application/json'])[0]

    return Cache.set(request.tid, request.path, request.language, content_type, data)[1]
def evaluate_update_notification(session, state, latest_version):
    """
    Record a newly advertised software version and notify the admin users.

    :param session: an ORM session used for config/user access
    :param state: the application state object used to send mail
    :param latest_version: the version string advertised upstream
    """
    config = ConfigFactory(session, 1)
    known_latest = config.get_val(u'latest_version')

    # A strictly newer version invalidates the cache and is persisted
    if parse_version(known_latest) < parse_version(latest_version):
        Cache.invalidate()
        config.set_val(u'latest_version', latest_version)

    # No mail needed when the node already runs the latest version
    if parse_version(__version__) == parse_version(latest_version):
        return

    for admin in db_get_admin_users(session, 1):
        language = admin['language']

        data = {
            'type': 'software_update_available',
            'latest_version': latest_version,
            'node': db_admin_serialize_node(session, 1, language),
            'notification': db_get_notification(session, 1, language),
            'user': admin,
        }

        state.format_and_send_mail(session, 1, admin, data)
def init_callback(ret):
    """
    Finalize the setup of an ephemeral onion service.

    Persists the service when it was freshly generated for a live tenant,
    tears it down otherwise, then invalidates the affected tenants' caches
    and refreshes their memory variables.
    """
    log.err('Initialization of onion-service %s completed.', ephs.hostname, tid=tid)

    # Only freshly generated services (no pre-existing hostname/key) need handling
    if not hostname and not key:
        if tid in State.tenant_cache:
            self.hs_map[ephs.hostname] = ephs
            yield set_onion_service_info(tid, ephs.hostname, ephs.private_key)
        else:
            # Tenant disappeared meanwhile: remove the service from Tor
            yield ephs.remove_from_tor(self.tor_conn.protocol)

        # tenant 1 always shares the invalidation; dedupe when tid == 1
        tid_list = list(set([1, tid]))

        for x in tid_list:
            # Fix: call invalidate on the class, as done everywhere else in
            # this codebase, instead of needlessly instantiating Cache
            Cache.invalidate(x)

        yield refresh_memory_variables(tid_list)
def wrapper(self, *args, **kwargs):
    """Return a gzip-encoded cached response, computing and caching it on a miss."""
    hit = Cache.get(self.request.tid, self.request.path, self.request.language)

    if hit is not None:
        # Cache hit: replay stored encoding, content type and body
        self.request.setHeader(b'Content-encoding', b'gzip')
        self.request.setHeader(b'Content-type', hit[0])
        return hit[1]

    def on_result(data):
        # Structured results are serialized as JSON before caching
        if isinstance(data, (dict, list)):
            self.request.setHeader(b'content-type', b'application/json')
            data = json.dumps(data)

        self.request.setHeader(b'Content-encoding', b'gzip')

        ctype = self.request.responseHeaders.getRawHeaders(b'Content-type', [b'application/json'])[0]
        return Cache.set(self.request.tid, self.request.path, self.request.language, ctype, data)[1]

    pending = defer.maybeDeferred(f, self, *args, **kwargs)
    pending.addCallback(on_result)
    return pending
def wrapper(self, *args, **kwargs):
    """Flush the tenant's cache when the handler requests it, then delegate."""
    must_flush = self.invalidate_cache

    if must_flush:
        Cache.invalidate(self.request.tid)

    return f(self, *args, **kwargs)
def setUp(self):
    # Run the base TestGL fixture setup, then make sure every test
    # starts from an empty response cache.
    yield helpers.TestGL.setUp(self)

    Cache.invalidate()
def test_cache(self):
    """Exercise set/get/invalidate behaviour of the in-memory cache."""
    self.assertEqual(Cache.memory_cache_dict, {})

    # All lookups miss on an empty cache
    for tid, language in ((1, "it"), (1, "en"), (2, "ca")):
        self.assertIsNone(Cache.get(tid, "passante_di_professione", language))

    # Populate two tenants
    Cache.set(1, "passante_di_professione", "it", 'text/plain', 'ititit')
    Cache.set(1, "passante_di_professione", "en", 'text/plain', 'enenen')
    Cache.set(2, "passante_di_professione", "ca", 'text/plain', 'cacaca')

    self.assertIn("passante_di_professione", Cache.memory_cache_dict[1])
    self.assertIn("passante_di_professione", Cache.memory_cache_dict[2])

    # A language never stored on tenant 1 is still a miss
    self.assertIsNone(Cache.get(1, "passante_di_professione", "ca"))

    tenant1_entry = Cache.memory_cache_dict[1]['passante_di_professione']
    self.assertIn("it", tenant1_entry)
    self.assertIn("en", tenant1_entry)

    self.assertIsNone(Cache.get(1, "passante_di_professione", "ca"))

    # Invalidation empties the whole cache
    Cache.invalidate()
    self.assertEqual(Cache.memory_cache_dict, {})
def test_cache(self):
    """Verify cache misses, population, gzip-compressed retrieval and invalidation."""
    self.assertEqual(Cache.memory_cache_dict, {})

    self.assertIsNone(Cache.get(1, "passante_di_professione", "it"))
    self.assertIsNone(Cache.get(1, "passante_di_professione", "en"))
    self.assertIsNone(Cache.get(2, "passante_di_professione", "ca"))

    samples = (
        (1, "it", 'ititit'),
        (1, "en", 'enenen'),
        (2, "ca", 'cacaca'),
    )

    for tid, language, payload in samples:
        Cache.set(tid, "passante_di_professione", language, 'text/plain', payload)

    self.assertIn("passante_di_professione", Cache.memory_cache_dict[1])
    self.assertIn("passante_di_professione", Cache.memory_cache_dict[2])

    # A language never stored on tenant 1 is still a miss
    self.assertIsNone(Cache.get(1, "passante_di_professione", "ca"))

    self.assertIn("it", Cache.memory_cache_dict[1]['passante_di_professione'])
    self.assertIn("en", Cache.memory_cache_dict[1]['passante_di_professione'])

    self.assertIsNone(Cache.get(1, "passante_di_professione", "ca"))

    # Stored bodies come back gzip-compressed
    for tid, language, payload in samples:
        self.assertEqual(Cache.get(tid, "passante_di_professione", language)[1],
                         gzipdata(payload))

    # Invalidation empties the whole cache
    Cache.invalidate()
    self.assertEqual(Cache.memory_cache_dict, {})