def setUp(self):
    """
    Prepare an isolated test environment: route every time-driven
    component through a fake clock, (re)initialize settings and the
    database, and reset global accumulators.

    NOTE(review): contains `yield`, so this is presumably decorated with
    Twisted's @inlineCallbacks at the call site — confirm.
    """
    # Wire every scheduler/session/timer hook to one task.Clock so tests
    # can advance time deterministically instead of sleeping.
    self.test_reactor = task.Clock()
    jobs.base.test_reactor = self.test_reactor
    token.TokenList.reactor = self.test_reactor
    runner.test_reactor = self.test_reactor
    tempdict.test_reactor = self.test_reactor
    GLSettings.sessions.reactor = self.test_reactor

    init_glsettings_for_unit_tests()

    self.setUp_dummy()

    if self.initialize_test_database_using_archived_db:
        # Fast path: reuse a pre-built empty DB snapshot instead of
        # running full schema initialization.
        shutil.copy(
            os.path.join(TEST_DIR, 'db', 'empty', GLSettings.db_file_name),
            os.path.join(GLSettings.working_path, 'db', GLSettings.db_file_name)
        )
    else:
        yield db.init_db()

    yield db.refresh_memory_variables()

    # Optional per-test-class fixtures (attribute may be absent).
    for fixture in getattr(self, 'fixtures', []):
        yield import_fixture(fixture)

    # override of imported memory variables
    GLSettings.memory_copy.allow_unencrypted = True

    # Reset global/class-level state shared across tests.
    Alarm.reset()
    event.EventTrackQueue.reset()
    jobs.statistics_sched.StatisticsSchedule.reset()

    self.internationalized_text = load_appdata()['node']['whistleblowing_button']
def setUp(self):
    """
    Prepare an isolated test environment: fake clock everywhere, fresh
    settings/database, node settings forced to test-friendly values, and
    global accumulators cleared.

    NOTE(review): contains `yield`, so this is presumably decorated with
    Twisted's @inlineCallbacks at the call site — confirm.
    """
    # Route every time-driven component through one deterministic clock.
    self.test_reactor = task.Clock()
    jobs.base.test_reactor = self.test_reactor
    tempdict.test_reactor = self.test_reactor
    token.TokenList.reactor = self.test_reactor
    runner.test_reactor = self.test_reactor
    GLSessions.reactor = self.test_reactor

    init_glsettings_for_unit_tests()

    self.setUp_dummy()

    if self.initialize_test_database_using_archived_db:
        # Fast path: copy a pre-built empty DB instead of initializing.
        shutil.copy(
            os.path.join(TEST_DIR, 'db', 'empty', GLSettings.db_file_name),
            os.path.join(GLSettings.working_path, 'db', GLSettings.db_file_name)
        )
    else:
        yield db.init_db(use_single_lang=True)

    # Encryption scenarios that include plaintext delivery need the node
    # to accept unencrypted submissions.
    allow_unencrypted = self.encryption_scenario in ['PLAINTEXT', 'MIXED']

    yield update_node_setting('allow_unencrypted', allow_unencrypted)

    # No artificial delay between submissions during tests.
    yield update_node_setting('submission_minimum_delay', 0)

    yield db.refresh_memory_variables()

    # Reset global/class-level state shared across tests.
    Alarm.reset()
    event.EventTrackQueue.clear()
    jobs.statistics_sched.StatisticsSchedule.reset()

    self.internationalized_text = load_appdata()['node']['whistleblowing_button']
def operation(self):
    """Recompute the activity alarm level, then run the disk-space checks."""
    Alarm.compute_activity_level()

    disk_free, disk_total = get_workingdir_space()
    ramdisk_free, ramdisk_total = get_ramdisk_space()
    Alarm.check_disk_anomalies(disk_free, disk_total,
                               ramdisk_free, ramdisk_total)
def operation(self):
    """Report the currently free disk space through a fresh Alarm instance."""
    # Imported locally in the original — kept that way (presumably to
    # avoid an import cycle).
    from globaleaks.anomaly import Alarm

    remaining_megabytes = ResourceChecker.get_free_space()
    Alarm().report_disk_usage(remaining_megabytes)
def test_save_anomalies(self):
    """
    Trigger a burst of events, compute the alarm level, then verify that
    the statistics scheduler persists the anomalies so they can be read
    back via get_anomaly_history().
    """
    ANOMALIES_COUNT = 50
    pollute_events_for_testing(ANOMALIES_COUNT)
    Alarm.compute_activity_level()

    anomdet = GLSettings.RecentAnomaliesQ.values()[0]
    self.assertEqual(len(GLSettings.RecentAnomaliesQ.keys()), 1)
    original_date = datetime_to_ISO8601(GLSettings.RecentAnomaliesQ.keys()[0])

    self.assertTrue(isinstance(anomdet, list))
    # BUG FIX: the original `assertTrue(len(anomdet), 2)` always passed,
    # because the 2 was taken as the failure *message*, not a comparison.
    # Assert the intended [event_matrix, alarm_level] pair explicitly.
    self.assertEqual(len(anomdet), 2)

    # alarm level was 2, right ?
    self.assertEqual(anomdet[1], 2, "Alarm raised is not 2 anymore ?")

    # every count needs to be ANOMALIES_COUNT * 2, because the
    # pollute function records two events of each kind
    for event, count in anomdet[0].iteritems():
        self.assertEqual(count, ANOMALIES_COUNT * 2)

    # scheduler happens to save these anomalies, along with stats
    yield StatisticsSchedule().operation()

    # now if we get our anomalies, we expect the same date back
    AH = yield get_anomaly_history(limit=10)
    self.assertEqual(original_date, AH[0]['date'])
def test_event_accouting(self):
    """Record one event per monitored type and check the queue snapshot."""
    Alarm.compute_activity_level()

    # one event of weight 1.0 for each monitored type
    for monitored in event.events_monitored:
        event.EventTrack(monitored, 1.0)

    snapshot = event.EventTrackQueue.take_current_snapshot()
    self.assertTrue(len(snapshot) > 1)
def test_generate_admin_alert_mail(self):
    """
    Smoke test: after polluting events and computing the activity level,
    generating the admin alert mail must complete without raising.
    """
    # Remind, these two has to be done to get an event matrix meaningful
    pollute_events_for_testing()
    yield Alarm.compute_activity_level()

    # Unused result bindings (`x`, `activity_level`) removed: the test
    # never asserted on them.
    yield Alarm.generate_admin_alert_mail(event_matrix={
        'wb_comments': 100,
        'noise': 12345
    })
def test_generate_admin_alert_mail(self):
    """Alert mail generation must succeed once the alarm level reaches 2."""
    # Remind: both steps are needed to get a meaningful event matrix
    self.pollute_events()
    level = yield Alarm.compute_activity_level()
    self.assertEqual(level, 2)

    yield Alarm.generate_admin_alert_mail(
        event_matrix={'wb_comments': 100, 'noise': 12345})
def test_generate_admin_alert_mail(self):
    """
    Smoke test: after polluting events and computing the activity level,
    generating the admin alert mail must complete without raising.
    """
    # Remind, these two has to be done to get an event matrix meaningful
    pollute_events_for_testing()
    yield Alarm.compute_activity_level()

    # Unused result bindings (`x`, `activity_level`) removed: the test
    # never asserted on them.
    yield Alarm.generate_admin_alert_mail(
        event_matrix={
            'wb_comments': 100,
            'noise': 12345
        }
    )
def operation(self):
    """
    Periodically check whether the system is experiencing anomalies:
    recompute the activity alarm level, then verify disk and ramdisk
    space. If an alarm is raised, it is logged in the db.
    """
    yield Alarm.compute_activity_level()
    free_disk_bytes, total_disk_bytes = get_workingdir_space()
    free_ramdisk_bytes, total_ramdisk_bytes = get_ramdisk_space()
    Alarm.check_disk_anomalies(free_disk_bytes, total_disk_bytes, free_ramdisk_bytes, total_ramdisk_bytes)
def generate_token_challenge(self, challenges_dict=None):
    """
    Reset all challenges on the token, then enable those requested in
    *challenges_dict* (or those derived from the current alarm difficulty
    when no dict is supplied).
    """
    # start from a clean slate
    self.human_captcha = False
    self.graph_captcha = False
    self.proof_of_work = False

    if challenges_dict is None:
        challenges_dict = Alarm().get_token_difficulty()

    if challenges_dict['human_captcha']:
        first = randint(0, 99)
        second = randint(0, 99)
        self.human_captcha = {
            'question': u"%d + %d" % (first, second),
            'answer': u"%d" % (first + second)
        }

    if challenges_dict['proof_of_work']:
        # still not implemented
        pass

    if challenges_dict['graph_captcha']:
        # still not implemented
        pass
def test_put_wrong_answer(self):
    """A wrong captcha answer leaves the token unsolved and rotates the question."""
    self.pollute_events()
    yield Alarm.compute_activity_level()

    tok = Token('submission')
    original_question = u'77+33'
    tok.human_captcha = {'question': original_question,
                         'answer': 1,
                         'solved': False}

    payload = tok.serialize()
    payload['human_captcha_answer'] = 883

    handler = self.request(payload)
    refreshed = yield handler.put(tok.id)

    self.assertFalse(tok.human_captcha['solved'])
    # the reply carries a freshly issued question, not the old one
    self.assertEqual(refreshed['human_captcha'], tok.human_captcha['question'])
    self.assertNotEqual(refreshed['human_captcha'], original_question)
    self.assertIsNot(refreshed['human_captcha'], False)
    self.assertNotIn('human_captcha_anwser', refreshed)
def operation(self, alarm_enable=True):
    """
    Every X seconds, check whether anomalies are happening from anonymous
    interaction (submission/file/comments/whatever flood).

    If an alarm has been raised, the event is logged in the DB.

    NOTE(review): `alarm_enable` is accepted but never read here —
    presumably consumed by an override or kept for interface
    compatibility; confirm before removing.
    """
    yield Alarm.compute_activity_level()
def post(self):
    """
    Request: None
    Response: SubmissionDesc (Token)
    Errors: ContextIdNotFound, InvalidInputFormat, SubmissionValidationFailure

    Create a Token, a temporary memory-only object that keeps track of
    the submission. If the system is under stress, completing the
    submission will require extra actions (e.g. hashcash and captchas)
    before it can be concluded.
    """
    if not GLSettings.memory_copy.accept_submissions:
        raise errors.SubmissionDisabled

    request = self.validate_message(self.request.body, requests.SubmissionDesc)

    token = Token('submission', request['context_id'])
    token.set_difficulty(Alarm().get_token_difficulty())

    token_answer = token.serialize_token()
    # one merged update instead of seven chained .update() calls
    token_answer.update({
        'id': token_answer['token_id'],
        'context_id': request['context_id'],
        'receivers': [],
        'answers': {},
        'human_captcha_answer': 0,
        'graph_captcha_answer': "",
        'proof_of_work': 0,
    })

    self.set_status(201)  # Created
    self.finish(token_answer)
def test_compute_activity_level(self):
    """
    remind: activity level is called every 30 seconds by
    """
    pollute_events_for_testing()
    baseline = len(event.EventTrackQueue.take_current_snapshot())
    pollute_events_for_testing()
    self.assertEqual(len(event.EventTrackQueue.take_current_snapshot()),
                     baseline * 2)

    level = yield Alarm.compute_activity_level()
    self.assertEqual(level, 2)

    # Has not slow comeback to 0
    level = yield Alarm.compute_activity_level()
    self.assertEqual(level, 0)
def operation(self):
    """
    Every X seconds, check whether anomalies are happening from anonymous
    interaction (submission/file/comments/whatever flood).

    If an alarm has been raised, the event is logged in the DB.
    This copies data inside StatisticsSchedule.RecentAnomaliesQ.
    """
    yield Alarm.compute_activity_level()
def setUp(self):
    """Raise the activity level and pre-generate a submission token challenge."""
    yield helpers.TestGL.setUp(self)

    pollute_events_for_testing()
    yield Alarm.compute_activity_level()

    # Token submission
    submission_token = Token('submission')
    submission_token.generate_token_challenge()
def test_compute_activity_level(self):
    """
    remind: activity level is called every 30 seconds by
    """
    pollute_events_for_testing()
    count_before = len(event.EventTrackQueue.take_current_snapshot())

    pollute_events_for_testing()
    count_after = len(event.EventTrackQueue.take_current_snapshot())
    self.assertEqual(count_after, count_before * 2)

    level = yield Alarm.compute_activity_level()
    self.assertEqual(level, 2)

    # Has not slow comeback to 0
    level = yield Alarm.compute_activity_level()
    self.assertEqual(level, 0)
def test_post(self):
    """POSTing a submission-type request yields a token with default values."""
    yield Alarm.compute_activity_level()

    handler = self.request({'type': 'submission'})
    yield handler.post()

    self.assert_default_token_values(self.responses[0])
def test_post(self):
    """A Tor client POSTing a submission request gets a default-valued token."""
    yield Alarm.compute_activity_level()

    handler = self.request({'type': 'submission'})
    handler.request.client_using_tor = True

    result = yield handler.post()
    self.assert_default_token_values(result)
def setUp(self):
    """Start from an empty queue, raise activity, and prepare a token challenge."""
    yield helpers.TestGL.setUp(self)

    # This is at the beginning
    event.EventTrackQueue.reset()

    pollute_events_for_testing()
    yield Alarm.compute_activity_level()

    # Token submission
    submission_token = Token('submission')
    submission_token.generate_token_challenge()
def setUp(self):
    """
    Prepare an isolated test environment: skip unsupported cases, fake
    the clock everywhere, rebuild settings/database, install a process
    supervisor stub, and reset global accumulators.

    NOTE(review): contains `yield`, so this is presumably decorated with
    Twisted's @inlineCallbacks at the call site — confirm.
    """
    # Allow the test configuration to skip this case entirely.
    test_config.skipCase(self)

    # Route every time-driven component through one deterministic clock.
    self.test_reactor = task.Clock()
    jobs.base.test_reactor = self.test_reactor
    tempdict.test_reactor = self.test_reactor
    token.TokenList.reactor = self.test_reactor
    GLSessions.reactor = self.test_reactor

    init_glsettings_for_unit_tests()

    self.setUp_dummy()

    if self.initialize_test_database_using_archived_db:
        # Fast path: copy a pre-built empty DB instead of initializing.
        shutil.copy(
            os.path.join(TEST_DIR, 'db', 'empty', GLSettings.db_file_name),
            os.path.join(GLSettings.working_path, 'db', GLSettings.db_file_name))
    else:
        yield db.init_db(use_single_lang=True)

    # Plaintext-capable scenarios need the node to accept unencrypted
    # submissions.
    allow_unencrypted = self.encryption_scenario in ['PLAINTEXT', 'MIXED']

    yield update_node_setting('allow_unencrypted', allow_unencrypted)

    yield db.refresh_memory_variables()

    # Minimal supervisor so code paths touching process management work.
    sup = ProcessSupervisor([], '127.0.0.1', 8082)
    GLSettings.state.process_supervisor = sup

    # Reset global/class-level state shared across tests.
    Alarm.reset()
    event.EventTrackQueue.clear()
    GLSettings.reset_hourly()

    # No artificial delay between submissions during tests.
    GLSettings.submission_minimum_delay = 0

    self.internationalized_text = load_appdata()['node']['whistleblowing_button']
def test_put_wrong_answer(self):
    """A wrong captcha answer must make the PUT fail with TokenFailure."""
    self.pollute_events()
    yield Alarm.compute_activity_level()

    tok = Token('submission')
    tok.human_captcha = {'question': 'XXX', 'answer': 1, 'solved': False}

    payload = tok.serialize()
    payload['human_captcha_answer'] = 883

    handler = self.request(payload)
    self.assertRaises(errors.TokenFailure, handler.put, tok.id)
def test_put_wrong_answer(self):
    """A wrong captcha answer must make the PUT fail with TokenFailure."""
    pollute_events_for_testing()
    yield Alarm.compute_activity_level()

    tok = Token('submission')
    tok.human_captcha = {'question': 'XXX', 'answer': 1, 'solved': False}

    payload = tok.serialize()
    payload['human_captcha_answer'] = 883

    handler = self.request(payload)
    self.assertRaises(errors.TokenFailure, handler.put, tok.id)
def test_put_right_answer(self):
    """A correct captcha answer disables the captcha challenge in the reply."""
    pollute_events_for_testing()
    yield Alarm.compute_activity_level()

    tok = Token('submission')
    tok.human_captcha = {'question': 'XXX', 'answer': 1}
    tok.proof_of_work = False

    payload = tok.serialize()
    payload['human_captcha_answer'] = 1

    handler = self.request(payload)
    yield handler.put(tok.id)

    self.assertEqual(self.responses[0]['human_captcha'], False)
def test_put_right_answer(self):
    """A correct captcha answer disables the captcha challenge in the reply."""
    pollute_events_for_testing()
    yield Alarm.compute_activity_level()

    tok = Token('submission')
    tok.proof_of_work = False
    tok.human_captcha = {'question': 'XXX', 'answer': 1}

    payload = tok.serialize()
    payload['human_captcha_answer'] = 1

    handler = self.request(payload)
    yield handler.put(tok.id)

    self.assertEqual(self.responses[0]['human_captcha'], False)
def test_put_right_answer(self):
    """A correct answer marks the captcha solved and clears it in the reply."""
    self.pollute_events()
    yield Alarm.compute_activity_level()

    tok = Token('submission')
    tok.human_captcha = {'question': 'XXX', 'answer': 1, 'solved': False}
    tok.proof_of_work['solved'] = True

    payload = tok.serialize()
    payload['human_captcha_answer'] = 1

    handler = self.request(payload)
    reply = yield handler.put(tok.id)
    tok.use()

    self.assertFalse(reply['human_captcha'])
    self.assertTrue(tok.human_captcha['solved'])
def test_put_right_answer(self):
    """A correct answer marks the captcha solved and clears it in the reply."""
    self.pollute_events()
    yield Alarm.compute_activity_level()

    tok = Token('submission')
    tok.human_captcha = {'question': 'XXX', 'answer': 1, 'solved': False}
    tok.proof_of_work['solved'] = True

    payload = tok.serialize()
    payload['human_captcha_answer'] = 1

    handler = self.request(payload)
    yield handler.put(tok.id)
    tok.use()

    self.assertFalse(self.responses[0]['human_captcha'])
    self.assertTrue(tok.human_captcha['solved'])
def test_put_wrong_answer(self):
    """A wrong answer keeps the captcha active and rotates the question."""
    event.EventTrackQueue.reset()
    pollute_events_for_testing()
    yield Alarm.compute_activity_level()

    tok = Token('submission')
    tok.human_captcha = {'question': 'XXX', 'answer': 1}
    tok.proof_of_work = False

    payload = tok.serialize()
    payload['human_captcha_answer'] = 2

    handler = self.request(payload)
    yield handler.put(tok.id)

    self.assertNotEqual(self.responses[0]['human_captcha'], False)
    # verify that the question is changed
    self.assertNotEqual(self.responses[0]['human_captcha'], 'XXX')
def test_put_wrong_answer(self):
    """A wrong answer keeps the captcha active and rotates the question."""
    event.EventTrackQueue.reset()
    pollute_events_for_testing()
    yield Alarm.compute_activity_level()

    tok = Token('submission')
    tok.proof_of_work = False
    tok.human_captcha = {'question': 'XXX', 'answer': 1}

    payload = tok.serialize()
    payload['human_captcha_answer'] = 2

    handler = self.request(payload)
    yield handler.put(tok.id)

    reply = self.responses[0]
    self.assertNotEqual(reply['human_captcha'], False)
    # verify that the question is changed
    self.assertNotEqual(reply['human_captcha'], 'XXX')
def setUp(self):
    """Base setup plus enough event pollution to raise the activity level."""
    yield helpers.TestGL.setUp(self)

    pollute_events_for_testing()
    yield Alarm.compute_activity_level()
def setUp(self):
    """Base setup plus enough event pollution to raise the activity level."""
    yield helpers.TestGL.setUp(self)

    self.pollute_events()
    yield Alarm.compute_activity_level()
def getAlarm(state):
    """Build an Alarm bound to *state*.

    The import is done locally rather than at module top — presumably to
    avoid a circular import; TODO confirm.
    """
    from globaleaks import anomaly
    return anomaly.Alarm(state)
def setUp(self):
    """Base setup, no lingering tokens, and a raised activity level."""
    yield helpers.TestGL.setUp(self)

    # start with an empty token list
    TokenList.clear()

    self.pollute_events()
    yield Alarm.compute_activity_level()
def operation(self):
    """Check the remaining disk space and report it through a new Alarm."""
    remaining = ResourceChecker.get_free_space()
    Alarm().report_disk_usage(remaining)
def operation(self):
    """Collect disk/ramdisk usage figures and run the anomaly checks."""
    disk_free, disk_total = get_workingdir_space()
    ram_free, ram_total = get_ramdisk_space()

    checker = Alarm()
    checker.check_disk_anomalies(disk_free, disk_total, ram_free, ram_total)
def operation(self):
    """Collect disk/ramdisk usage figures and run the anomaly checks."""
    disk_free, disk_total = ResourceChecker.get_workingdir_space()
    ram_free, ram_total = ResourceChecker.get_ramdisk_space()

    checker = Alarm()
    checker.check_disk_anomalies(disk_free, disk_total, ram_free, ram_total)