def set_passwords_ready_to_expire(self, session, tid):
    """Reset password state for every user of tenant `tid`.

    Nulls the password change date (epoch) and clears the
    password_change_needed flag in a single bulk UPDATE.
    """
    changes = {
        'password_change_date': datetime_null(),
        'password_change_needed': False,
    }

    session.query(models.User).filter(models.User.tid == tid).update(changes)
def test_utc_dynamic_date(self):
    """utc_dynamic_date must apply offsets and treat equivalent offsets equally."""
    a = utility.utc_dynamic_date(utility.datetime_null())
    b = utility.utc_dynamic_date(utility.datetime_null(), seconds=0, minutes=0, hours=0)
    self.assertEqual(a, b)

    # Three different spellings of the same 2h 2m 1s offset must agree.
    c = utility.utc_dynamic_date(utility.datetime_null(), seconds=121, minutes=120, hours=0)
    d = utility.utc_dynamic_date(utility.datetime_null(), seconds=61, minutes=61, hours=1)
    e = utility.utc_dynamic_date(utility.datetime_null(), seconds=1, minutes=2, hours=2)
    self.assertEqual(c, d)
    self.assertEqual(d, e)

    # Applying the same offset a second time must keep them in agreement.
    f = utility.utc_dynamic_date(c, seconds=121, minutes=120, hours=0)
    g = utility.utc_dynamic_date(d, seconds=61, minutes=61, hours=1)
    h = utility.utc_dynamic_date(e, seconds=1, minutes=2, hours=2)
    # BUGFIX: the original repeated assertEqual(c, d)/(d, e) here, so the
    # results f, g, h were computed but never actually checked.
    self.assertEqual(f, g)
    self.assertEqual(g, h)
def migrate_Receiver(self):
    """
    Migrate Receiver rows to the new schema: normalize the GPG key status
    values and resolve each armored key's expiration date; all other
    columns are copied verbatim.
    """
    gpgobj = GLBPGP()

    old_receivers = self.store_old.find(self.model_from['Receiver'])
    for old_receiver in old_receivers:
        new_receiver = self.model_to['Receiver']()

        # Epoch placeholder means "expiration unknown / no key"
        gpg_key_expiration = datetime_null()
        if old_receiver.gpg_key_armor:
            try:
                gpg_key_expiration = gpgobj.load_key(old_receiver.gpg_key_armor)['expiration']
            except Exception:
                # Unparseable key: keep the epoch placeholder
                pass

        for _, v in new_receiver._storm_columns.iteritems():
            if v.name == 'gpg_key_status':
                # Old schema used capitalized u'Enabled'; new one is lowercase
                if old_receiver.gpg_key_status == u'Enabled':
                    new_receiver.gpg_key_status = u'enabled'
                else:
                    new_receiver.gpg_key_status = u'disabled'
                continue

            if v.name == 'gpg_key_expiration':
                new_receiver.gpg_key_expiration = gpg_key_expiration
                continue

            setattr(new_receiver, v.name, getattr(old_receiver, v.name))

        self.store_new.add(new_receiver)

    gpgobj.destroy_environment()
def test_datetime_to_ISO8601_to_datetime_to_dot_dot_dot(self):
    """Round-tripping datetime <-> ISO8601 must be lossless."""
    a = utility.datetime_null()
    b = utility.datetime_to_ISO8601(a)
    c = utility.ISO8601_to_datetime(b)
    d = utility.datetime_to_ISO8601(c)
    # BUGFIX: assertTrue(a, c) only tested the truthiness of `a` (c was
    # silently used as the failure message); the round-tripped values must
    # be compared for equality.
    self.assertEqual(a, c)
    self.assertEqual(b, d)
def migrate_User(self):
    # Populate the columns introduced by schema 15 with explicit defaults
    # and copy every other column over verbatim from schema 14.
    print "%s User migration assistant: (language, timezone," \
          "password_change_needed, password_change_date)" % self.std_fancy

    old_users = self.store_old.find(self.get_right_model("User", 14))

    for old_user in old_users:
        new_user = self.get_right_model("User", 15)()

        for _, v in new_user._storm_columns.iteritems():
            if v.name == 'language':
                # New column: default every existing user to English
                new_user.language = u'en'
                continue
            if v.name == 'timezone':
                # New column: default to UTC (offset 0)
                new_user.timezone = 0
                continue
            if v.name == 'password_change_needed':
                new_user.password_change_needed = False
                continue
            if v.name == 'password_change_date':
                # Epoch == password never changed
                new_user.password_change_date = datetime_null()
                continue
            setattr(new_user, v.name, getattr(old_user, v.name))

        self.store_new.add(new_user)

    self.store_new.commit()
def perform_pgp_validation_checks(self, store):
    """
    Collect users whose PGP key is expired or expires within 15 days,
    disable expired keys (when the node allows unencrypted delivery),
    then queue admin and per-user alert mails.
    """
    expired_or_expiring = []

    for user in store.find(models.User):
        if user.pgp_key_public and user.pgp_key_expiration != datetime_null():
            if user.pgp_key_expiration < datetime_now():
                expired_or_expiring.append(user_serialize_user(user, GLSettings.memory_copy.default_language))

                if GLSettings.memory_copy.allow_unencrypted:
                    # The PGP key status should be downgraded only if the node
                    # accept non PGP mails/files to be sent/stored.
                    # If the node wont accept this the pgp key status
                    # will remain enabled and mail won't be sent by regular flow.
                    user.pgp_key_status = u'disabled'
                    user.pgp_key_info = None
                    user.pgp_key_public = None
                    user.pgp_key_fingerprint = None
                    user.pgp_key_expiration = None
            elif user.pgp_key_expiration < datetime_now() + timedelta(days=15):
                # BUGFIX: was `datetime_now() - timedelta(days=15)`, a branch
                # that can never be reached once the expired case above is
                # excluded; `+` reports keys expiring within the next 15 days.
                expired_or_expiring.append(user_serialize_user(user, GLSettings.memory_copy.default_language))

    if expired_or_expiring:
        if not GLSettings.memory_copy.disable_admin_notification_emails:
            self.prepare_admin_pgp_alerts(store, expired_or_expiring)

        for user_desc in expired_or_expiring:
            self.prepare_user_pgp_alerts(store, user_desc)
def db_get_expired_or_expiring_pgp_users(session, tids_list):
    """
    Return the users of the given tenants that have a PGP key which is
    already expired or will expire within the next 15 days.
    """
    q = session.query(models.User)
    q = q.filter(models.User.pgp_key_public != u'')
    q = q.filter(models.User.pgp_key_expiration != datetime_null())
    q = q.filter(models.User.pgp_key_expiration < datetime_now() + timedelta(days=15))
    q = q.filter(models.UserTenant.user_id == models.User.id)
    q = q.filter(models.UserTenant.tenant_id.in_(tids_list))

    return q
def wizard(store, request, language):
    """
    First-setup wizard: configure the node name and language, create the
    initial context, the first receiver and the admin account.
    Raises ForbiddenOperation if the wizard already ran.
    """
    node = NodeFactory(store)

    if node.get_val('wizard_done'):
        # TODO report as anomaly
        log.err("DANGER: Wizard already initialized!")
        raise errors.ForbiddenOperation

    try:
        node._query_group()

        nn = unicode(request['node']['name'])
        node.set_val('name', nn)
        node.set_val('default_language', language)
        node.set_val('wizard_done', True)

        node_l10n = NodeL10NFactory(store)
        # The node name doubles as the default description/title/presentation
        node_l10n.set_val('description', language, nn)
        node_l10n.set_val('header_title_homepage', language, nn)
        node_l10n.set_val('presentation', language, nn)

        context = db_create_context(store, request['context'], language)

        # Keep only the language chosen during the wizard
        langs_to_drop = EnabledLanguage.list(store)
        langs_to_drop.remove(language)
        if len(langs_to_drop):
            EnabledLanguage.remove_old_langs(store, langs_to_drop)

        request['receiver']['contexts'] = [context.id]
        request['receiver']['language'] = language
        db_create_receiver(store, request['receiver'], language)

        admin_dict = {
            'username': u'admin',
            'password': request['admin']['password'],
            'role': u'admin',
            'state': u'enabled',
            'deletable': False,
            'name': u'Admin',
            'public_name': u'Admin',
            'description': u'',
            'mail_address': request['admin']['mail_address'],
            'language': language,
            'password_change_needed': False,
            'pgp_key_remove': False,
            'pgp_key_fingerprint': '',
            'pgp_key_public': '',
            # Epoch == no PGP key configured
            'pgp_key_expiration': datetime_null()
        }

        db_create_admin_user(store, admin_dict, language)
    except Exception as excep:
        log.err("Failed wizard initialization %s" % excep)
        raise excep
def update_defaults(self, group, langs, data, reset=False):
    """
    Refresh localized configuration defaults for `group` in each language.

    Existing entries are overwritten only when they were never customized
    (update_date still at the epoch) or when `reset` is True; entries the
    filter declares but that do not exist yet are initialized from `data`.
    """
    never_updated = datetime_null()

    for lang in langs:
        seen = set()

        for cfg in self.get_all(group, lang):
            seen.add(cfg.var_name)

            if cfg.var_name in data and (reset or cfg.update_date == never_updated):
                cfg.value = data[cfg.var_name][lang]

        missing = list(set(ConfigL10NFilters[group]) - seen)
        ConfigL10NFactory.initialize(self, missing, lang, data)
def init_db(store):
    """
    Initialize the database: create tables, the Node and Notification
    singletons (seeded from appdata), load the logo and create the
    default admin account.
    """
    db_create_tables(store)
    appdata_dict = db_update_appdata(store)

    log.debug("Performing database initialization...")

    node = models.Node()
    node.wizard_done = GLSettings.skip_wizard
    node.receipt_salt = generateRandomSalt()

    for k in appdata_dict['node']:
        setattr(node, k, appdata_dict['node'][k])

    notification = models.Notification()
    for k in appdata_dict['templates']:
        setattr(notification, k, appdata_dict['templates'][k])

    logo = ''
    with open(os.path.join(GLSettings.client_path, 'logo.png'), 'r') as logo_file:
        logo = logo_file.read()

    node.logo = models.File()
    node.logo.data = base64.b64encode(logo)

    store.add(node)
    store.add(notification)

    admin_dict = {
        'username': u'admin',
        'password': u'globaleaks',
        # BUGFIX: removed the misspelled duplicate key 'deeletable'; the
        # correctly spelled 'deletable' entry below already carries the value.
        'role': u'admin',
        'state': u'enabled',
        'deletable': False,
        'name': u'Admin',
        'description': u'',
        'mail_address': u'',
        'language': node.default_language,
        'timezone': node.default_timezone,
        'password_change_needed': False,
        'pgp_key_remove': False,
        'pgp_key_status': 'disabled',
        'pgp_key_info': '',
        'pgp_key_fingerprint': '',
        'pgp_key_public': '',
        # Epoch == no PGP key configured
        'pgp_key_expiration': datetime_null()
    }

    admin = db_create_admin_user(store, admin_dict, node.default_language)
    admin.password_change_needed = False
def emulate_file_upload(self, token, n):
    """
    Emulate the upload of `n` files onto an incomplete submission token.
    """
    for _ in range(n):
        spooled = self.get_dummy_file()
        spooled = yield threads.deferToThread(files.dump_file_fs, spooled)
        spooled['creation_date'] = datetime_null()

        serialized = files.serialize_memory_file(spooled)
        token.associate_file(spooled)

        required_keys = {'size', 'content_type', 'name', 'creation_date'}
        self.assertFalse(required_keys - set(serialized.keys()))
def _transact_with_stuff_failing(self, store):
    """Populate a user+receiver pair, then raise DisconnectionError to exercise rollback."""
    localized = self.localization_set(self.dummyReceiver_1, Receiver, 'en')

    receiver_user = User(self.dummyReceiverUser_1)
    for field in ('last_login', 'password_change_needed', 'mail_address'):
        setattr(receiver_user, field, self.dummyReceiverUser_1[field])
    receiver_user.password_change_date = datetime_null()
    store.add(receiver_user)

    receiver = Receiver(localized)
    receiver.user_id = receiver_user.id
    receiver.pgp_key_status = u'disabled'
    store.add(receiver)

    raise exceptions.DisconnectionError
def __init__(self, state):
    # Bookkeeping for alarm/monitoring state.
    self.state = state

    # Epoch date == no alarm mail ever sent yet
    self.last_alarm_email = datetime_null()
    self.event_matrix = {}

    self.measured_freespace = 0
    self.measured_totalspace = 0

    self.alarm_levels = {
        'disk_space': 0,
        'disk_message': None,
        'activity': 0,
    }
def migrate_InternalTip(self):
    """
    Migrate InternalTip rows, filling the columns added by the new schema
    with defaults derived from the old tip and its owning context.
    """
    old_objs = self.store_old.find(self.model_from['InternalTip'])
    for old_obj in old_objs:
        new_obj = self.model_to['InternalTip']()
        for _, v in new_obj._storm_columns.iteritems():
            if v.name == 'update_date':
                # No update history in the old schema: seed with creation date
                new_obj.update_date = old_obj.creation_date
                continue
            if v.name == 'identity_provided':
                new_obj.identity_provided = False
                continue
            if v.name == 'identity_provided_date':
                # Epoch == identity never provided
                new_obj.identity_provided_date = datetime_null()
                continue
            if v.name == 'total_score':
                new_obj.total_score = 0
                continue
            if v.name == 'enable_comments':
                # Per-tip flags inherit the owning context configuration
                new_obj.enable_comments = old_obj.context.enable_comments
                continue
            if v.name == 'enable_messages':
                new_obj.enable_messages = old_obj.context.enable_private_messages
                continue
            if v.name == 'enable_two_way_comments':
                new_obj.enable_two_way_comments = True
                continue
            if v.name == 'enable_two_way_messages':
                new_obj.enable_two_way_messages = True
                continue
            if v.name == 'enable_attachments':
                new_obj.enable_attachments = True
                continue
            if v.name == 'enable_whistleblower_identity':
                new_obj.enable_whistleblower_identity = False
                continue
            setattr(new_obj, v.name, getattr(old_obj, v.name))
        self.store_new.add(new_obj)
def test_session_management_sched(self):
    """The session management scheduler must reap every expired session."""
    expired_session = OD(
        refreshdate=datetime_null(),  # epoch => new but already expired session
        id="admin",
        role="admin",
        user_id="admin"
    )

    for token in ('111', '222', '333'):
        GLSetting.sessions[token] = expired_session

    yield session_management_sched.SessionManagementSchedule().operation()

    self.assertEqual(len(GLSetting.sessions), 0)
def init_db(store):
    """
    Initialize the database: create tables (sqlite only), the Node and
    Notification singletons, default questionnaires/fields, and the
    default admin account.
    """
    if GLSettings.db_type == 'sqlite':
        db_create_tables(store)

    appdata_dict = db_init_appdata(store)

    log.debug("Performing database initialization...")

    node = models.Node()
    node.wizard_done = GLSettings.skip_wizard
    node.receipt_salt = generateRandomSalt()

    for k in appdata_dict['node']:
        setattr(node, k, appdata_dict['node'][k])

    notification = models.Notification()
    for k in appdata_dict['templates']:
        setattr(notification, k, appdata_dict['templates'][k])

    store.add(node)
    store.add(notification)

    load_default_questionnaires(store)
    load_default_fields(store)

    admin_dict = {
        'username': u'admin',
        'password': u'globaleaks',
        # BUGFIX: removed the misspelled duplicate key 'deeletable'; the
        # correctly spelled 'deletable' entry below already carries the value.
        'role': u'admin',
        'state': u'enabled',
        'deletable': False,
        'name': u'Admin',
        'description': u'',
        'mail_address': u'',
        'language': node.default_language,
        'timezone': node.default_timezone,
        'password_change_needed': False,
        'pgp_key_remove': False,
        'pgp_key_status': 'disabled',
        'pgp_key_info': '',
        'pgp_key_fingerprint': '',
        'pgp_key_public': '',
        # Epoch == no PGP key configured
        'pgp_key_expiration': datetime_null()
    }

    admin = db_create_admin(store, admin_dict, node.default_language)
    admin.password_change_needed = False
def emulate_file_upload(self, token, n):
    """
    Emulate `n` encrypted file uploads onto an incomplete submission token.
    """
    for _ in range(n):
        dummy = self.get_dummy_file()
        destination = os.path.join(GLSettings.submission_path,
                                   os.path.basename(dummy['path']))

        dummy = yield threads.deferToThread(write_upload_encrypted_to_disk, dummy, destination)
        dummy['date'] = datetime_null()

        token.associate_file(dummy)

        dummy['body'].close()
def pgp_validation_check(self, store):
    """
    Return serialized receivers whose PGP key is expired or expires
    within 15 days; expired keys are disabled when the node accepts
    unencrypted delivery.
    """
    expired_or_expiring = []

    for rcvr in store.find(models.Receiver):
        if rcvr.user.pgp_key_public and rcvr.user.pgp_key_expiration != datetime_null():
            if rcvr.user.pgp_key_expiration < datetime_now():
                expired_or_expiring.append(admin_serialize_receiver(rcvr, GLSettings.memory_copy.default_language))

                if GLSettings.memory_copy.allow_unencrypted:
                    # The PGP key status should be downgraded only if the node
                    # accept non PGP mails/files to be sent/stored.
                    # If the node wont accept this the pgp key status
                    # will remain enabled and mail won't be sent by regular flow.
                    rcvr.user.pgp_key_status = u"disabled"
            elif rcvr.user.pgp_key_expiration < datetime_now() + timedelta(days=15):
                # BUGFIX: was `datetime_now() - timedelta(days=15)`, which is
                # unreachable once the expired case above is excluded; `+`
                # reports keys expiring within the next 15 days.
                expired_or_expiring.append(admin_serialize_receiver(rcvr, GLSettings.memory_copy.default_language))

    return expired_or_expiring
def test_001_successful_session_expiry_on_admin_auth_request(self):
    """An epoch-dated session must be expired and dropped on the next admin request."""
    ancient = utility.datetime_null()  # oh a very old date!

    GLSetting.sessions = {}
    stale = ObjectDict()
    stale['user_id'] = u'admin'
    stale['role'] = u'admin'
    stale['id'] = u'antani'
    stale['refreshdate'] = ancient
    GLSetting.sessions[u'antani'] = stale

    handler = self.request({}, headers={'X-Session': 'antani'})
    self.assertRaises(errors.AdminSessionExpired, handler.get)

    self.assertTrue(handler.current_user is None)
    self.assertEqual(len(GLSetting.sessions.keys()), 0)
def perform_pgp_validation_checks(self, store):
    """
    Serialize users with expired/expiring PGP keys, wipe the keys that are
    already expired, then queue admin and per-user alert mails.
    """
    expired_or_expiring = []

    for user in db_get_expired_or_expiring_pgp_users(store):
        expired_or_expiring.append(user_serialize_user(user, GLSettings.memory_copy.default_language))

        if user.pgp_key_expiration < datetime_now():
            # Expired: wipe the key material, leaving expiration at the epoch
            user.pgp_key_public = ''
            user.pgp_key_fingerprint = ''
            user.pgp_key_expiration = datetime_null()

    if expired_or_expiring:
        if not GLSettings.memory_copy.notif.disable_admin_notification_emails:
            self.prepare_admin_pgp_alerts(store, expired_or_expiring)

        for user_desc in expired_or_expiring:
            self.prepare_user_pgp_alerts(store, user_desc)
def _transact_with_stuff(self, store):
    """Create a receiver (with its backing user) and return the receiver id."""
    localized = self.localization_set(self.dummyReceiver_1, Receiver, 'en')

    receiver_user = User(self.dummyReceiverUser_1)
    for field in ('last_login', 'password_change_needed'):
        setattr(receiver_user, field, self.dummyReceiverUser_1[field])
    receiver_user.password_change_date = datetime_null()
    # Avoid receivers with the same username!
    receiver_user.username = unicode("xxx")
    store.add(receiver_user)

    receiver = Receiver(localized)
    receiver.user_id = receiver_user.id
    receiver.pgp_key_status = u'disabled'
    receiver.mail_address = self.dummyReceiver_1['mail_address']
    store.add(receiver)

    return receiver.id
def perform_pgp_validation_checks(self, store):
    """
    Collect users whose PGP key is expired or expires within 15 days,
    wipe expired keys, then queue admin and per-user alert mails.
    """
    expired_or_expiring = []

    for user in store.find(models.User):
        if user.pgp_key_public and user.pgp_key_expiration != datetime_null():
            if user.pgp_key_expiration < datetime_now():
                expired_or_expiring.append(user_serialize_user(user, GLSettings.memory_copy.default_language))
                user.pgp_key_public = None
                user.pgp_key_fingerprint = None
                user.pgp_key_expiration = None
            elif user.pgp_key_expiration < datetime_now() + timedelta(days=15):
                # BUGFIX: was `datetime_now() - timedelta(days=15)`, which is
                # unreachable once the expired case above is excluded; `+`
                # reports keys expiring within the next 15 days.
                expired_or_expiring.append(user_serialize_user(user, GLSettings.memory_copy.default_language))

    if expired_or_expiring:
        if not GLSettings.memory_copy.notif.disable_admin_notification_emails:
            self.prepare_admin_pgp_alerts(store, expired_or_expiring)

        for user_desc in expired_or_expiring:
            self.prepare_user_pgp_alerts(store, user_desc)
def migrate_User(self):
    # Receivers and Users are migrated all together this time!
    # The only user to be migrated separately is the admin
    old_user_model = self.model_from["User"]
    old_admin = self.store_old.find(old_user_model, old_user_model.username == u"admin").one()

    new_admin = self.model_to["User"]()
    for _, v in new_admin._storm_columns.iteritems():
        # Columns introduced by the new schema get explicit defaults;
        # every other column is copied verbatim from the old admin row.
        if v.name == "name":
            new_admin.name = u"Admin"
            continue
        if v.name == "description":
            new_admin.description = {"en": ""}
            continue
        if v.name == "deletable":
            # The admin account must never be deletable
            new_admin.deletable = False
            continue
        if v.name == "pgp_key_status":
            new_admin.pgp_key_status = "disabled"
            continue
        if v.name == "pgp_key_info":
            new_admin.pgp_key_info = ""
            continue
        if v.name == "pgp_key_fingerprint":
            new_admin.pgp_key_fingerprint = ""
            continue
        if v.name == "pgp_key_public":
            new_admin.pgp_key_public = ""
            continue
        if v.name == "pgp_key_expiration":
            # Epoch == no PGP key configured
            new_admin.pgp_key_expiration = datetime_null()
            continue
        setattr(new_admin, v.name, getattr(old_admin, v.name))

    self.store_new.add(new_admin)
def migrate_User(self):
    # Receivers and Users are migrated all together this time!
    # The only user to be migrated separately is the admin
    old_user_model = self.model_from['User']
    old_admin = self.store_old.find(old_user_model, old_user_model.username == u'admin').one()

    new_admin = self.model_to['User']()
    for _, v in new_admin._storm_columns.iteritems():
        # Columns introduced by the new schema get explicit defaults;
        # every other column is copied verbatim from the old admin row.
        if v.name == 'name':
            new_admin.name = u'Admin'
            continue
        if v.name == 'description':
            new_admin.description = {'en': ''}
            continue
        if v.name == 'deletable':
            # The admin account must never be deletable
            new_admin.deletable = False
            continue
        if v.name == 'pgp_key_status':
            new_admin.pgp_key_status = 'disabled'
            continue
        if v.name == 'pgp_key_info':
            new_admin.pgp_key_info = ''
            continue
        if v.name == 'pgp_key_fingerprint':
            new_admin.pgp_key_fingerprint = ''
            continue
        if v.name == 'pgp_key_public':
            new_admin.pgp_key_public = ''
            continue
        if v.name == 'pgp_key_expiration':
            # Epoch == no PGP key configured
            new_admin.pgp_key_expiration = datetime_null()
            continue
        setattr(new_admin, v.name, getattr(old_admin, v.name))

    self.store_new.add(new_admin)
def test_successful_session_expiry_on_auth_request(self):
    """An epoch-dated session must be expired and dropped on the next request."""
    ancient = datetime_null()  # oh a very old date!

    GLSetting.sessions = {}
    stale = ObjectDict()
    stale['user_id'] = u'admin'
    stale['role'] = u'admin'
    stale['id'] = u'antani'
    stale['refreshdate'] = ancient
    GLSetting.sessions[u'antani'] = stale

    handler = self.request({}, headers={'X-Session': 'antani'})

    # this request raise an exception for session expiration
    try:
        yield handler.get()
    except:
        pass

    self.assertTrue(handler.current_user is None)
    self.assertEqual(len(GLSetting.sessions.keys()), 0)
def parse_pgp_options(user, request):
    """
    Parse the PGP fields of `request` and update `user` accordingly:
    load and store a provided public key, or wipe the key material when
    removal is requested or no valid key is supplied.
    """
    pgp_key_public = request['pgp_key_public']
    remove_key = request['pgp_key_remove']

    key_info = None
    if pgp_key_public and not remove_key:
        pgpctx = PGPContext(State.settings.tmp_path)
        key_info = pgpctx.load_key(pgp_key_public)

    if key_info is None:
        # No key (or removal requested): clear everything
        user.pgp_key_public = ''
        user.pgp_key_fingerprint = ''
        user.pgp_key_expiration = datetime_null()
    else:
        user.pgp_key_public = pgp_key_public
        user.pgp_key_fingerprint = key_info['fingerprint']
        user.pgp_key_expiration = key_info['expiration']
def migrate_InternalTip(self):
    """
    Fold each WhistleblowerTip's last access into the new InternalTip
    model (wb_last_access); tips without a whistleblower tip are dropped.
    """
    old_objs = self.session_old.query(self.model_from['InternalTip'])
    for old_obj in old_objs:
        new_obj = self.model_to['InternalTip']()

        old_wbtip_model = self.model_from['WhistleblowerTip']
        # BUGFIX: .one() raises NoResultFound when the wbtip is missing,
        # making the `is None` guard below unreachable; .one_or_none()
        # returns None and lets the orphan-handling path run.
        old_wbtip = self.session_old.query(old_wbtip_model) \
                                    .filter(old_wbtip_model.internaltip_id == old_obj.id) \
                                    .one_or_none()

        if old_wbtip is None:
            # Orphaned tip: skip it and keep the migration counters consistent
            self.entries_count['InternalTip'] -= 1
            continue

        for key in [c.key for c in new_obj.__table__.columns]:
            if key == 'wb_last_access':
                if old_wbtip.last_access != datetime_null():
                    new_obj.wb_last_access = old_wbtip.last_access
                else:
                    # BUGFIX: was `new_obj.last_access`, which is not a column
                    # of the new model and silently left wb_last_access unset;
                    # fall back to the tip creation date instead.
                    new_obj.wb_last_access = old_obj.creation_date
            else:
                setattr(new_obj, key, getattr(old_obj, key))

        self.session_new.add(new_obj)
def perform_pgp_validation_checks(self, session):
    """
    Collect, per tenant, users whose PGP key is expired or expiring,
    wipe already-expired keys, then dispatch user and admin alert mails.
    """
    # Tenant 1 is pre-seeded so root-tenant admin alerts are always evaluated
    tenant_expiry_map = {1: []}

    for user in db_get_expired_or_expiring_pgp_users(session, self.state.tenant_cache.keys()):
        user_desc = user_serialize_user(session, user, user.language)
        tenant_expiry_map.setdefault(user.tid, []).append(user_desc)

        if user.pgp_key_expiration < datetime_now():
            # BUGFIX: log only when the key is actually removed; previously
            # the message was emitted for still-valid, merely-expiring keys.
            log.info('Removing expired PGP key of: %s', user.username, tid=user.tid)
            user.pgp_key_public = ''
            user.pgp_key_fingerprint = ''
            user.pgp_key_expiration = datetime_null()

    for tid, expired_or_expiring in tenant_expiry_map.items():
        for user_desc in expired_or_expiring:
            self.prepare_user_pgp_alerts(session, tid, user_desc)

        if self.state.tenant_cache[tid].notification.disable_admin_notification_emails:
            continue

        if expired_or_expiring:
            self.prepare_admin_pgp_alerts(session, tid, expired_or_expiring)
def pgp_validation_check(self, store):
    """
    Return serialized receivers whose PGP key is expired or expires
    within 15 days; expired keys are disabled when the node accepts
    unencrypted delivery.
    """
    node_desc = db_admin_serialize_node(store, 'en')

    expired_or_expiring = []

    rcvrs = store.find(Receiver)

    for rcvr in rcvrs:
        if rcvr.pgp_key_public and rcvr.pgp_key_expiration != datetime_null():
            if rcvr.pgp_key_expiration < datetime_now():
                expired_or_expiring.append(admin_serialize_receiver(rcvr, GLSetting.memory_copy.language))

                if node_desc['allow_unencrypted']:
                    # The PGP key status should be downgraded only if the node
                    # accept non PGP mails/files to be sent/stored.
                    # If the node wont accept this the pgp key status
                    # will remain enabled and mail won't be sent by regular flow.
                    rcvr.pgp_key_status = u'disabled'
            elif rcvr.pgp_key_expiration < datetime_now() + timedelta(days=15):
                # BUGFIX: was `datetime_now() - timedelta(days=15)`, which is
                # unreachable once the expired case above is excluded; `+`
                # reports keys expiring within the next 15 days.
                expired_or_expiring.append(admin_serialize_receiver(rcvr, GLSetting.memory_copy.language))

    return expired_or_expiring
def migrate_Receiver(self):
    # Normalize the GPG key status values and resolve each armored key's
    # expiration while copying receivers from schema 16 to schema 17.
    print "%s Receiver migration assistant" % self.std_fancy

    gpgobj = GLBPGP()

    old_receivers = self.store_old.find(self.get_right_model("Receiver", 16))
    for old_receiver in old_receivers:
        new_receiver = self.get_right_model("Receiver", 17)()

        # Epoch placeholder means "expiration unknown / no key"
        gpg_key_expiration = datetime_null()
        if old_receiver.gpg_key_armor:
            try:
                gpg_key_expiration = gpgobj.load_key(old_receiver.gpg_key_armor)['expiration']
            except:
                # Unparseable key: keep the epoch placeholder
                pass

        for _, v in new_receiver._storm_columns.iteritems():
            if v.name == 'gpg_key_status':
                # Old schema used capitalized u'Enabled'; new one is lowercase
                if old_receiver.gpg_key_status == u'Enabled':
                    new_receiver.gpg_key_status = u'enabled'
                else:
                    new_receiver.gpg_key_status = u'disabled'
                continue

            if v.name == 'gpg_key_expiration':
                new_receiver.gpg_key_expiration = gpg_key_expiration
                continue

            setattr(new_receiver, v.name, getattr(old_receiver, v.name))

        self.store_new.add(new_receiver)

    self.store_new.commit()
    gpgobj.destroy_environment()
def force_itip_expiration(self, store):
    """Expire every InternalTip immediately by backdating expiration_date to the epoch."""
    all_itips = store.find(models.InternalTip)
    all_itips.set(expiration_date=datetime_null())
def force_wbtip_expiration(self, store):
    """Backdate the whistleblower last access of every tip to the epoch."""
    for tip in store.find(models.InternalTip):
        tip.wb_last_access = datetime_null()
def force_tip_expire(self, store):
    """Backdate every tip's expiration date so cleanup treats them as expired."""
    for internaltip in store.find(models.InternalTip):
        internaltip.expiration_date = datetime_null()
def db_create_submission(session, tid, request, token, client_using_tor):
    """
    Register a new whistleblower submission for tenant `tid`:
    validate the selected recipients, create the InternalTip (and, when
    the scoring configuration allows it, the WhistleblowerTip with its
    receipt), archive the questionnaire answers, store the uploaded files
    and create one receiver tip per recipient.

    Returns a dict with the generated 'receipt' ('' when none) and the
    submission 'score'.
    """
    # Tenant-level encryption switch; may be downgraded below for
    # PGP-era continuity (see the recipient loop).
    encryption = db_get(session,
                        models.Config,
                        (models.Config.tid == tid, models.Config.var_name == 'encryption'))
    crypto_is_available = encryption.value

    tenant = db_get(session, models.Tenant, models.Tenant.id == tid)

    context, questionnaire = db_get(session,
                                    (models.Context, models.Questionnaire),
                                    (models.Context.id == request['context_id'],
                                     models.Questionnaire.id == models.Context.questionnaire_id))
    answers = request['answers']

    # Snapshot the questionnaire structure so the answers stay readable
    # even if the questionnaire is later modified.
    steps = db_get_questionnaire(session, tid, questionnaire.id, None)['steps']
    questionnaire_hash = db_archive_questionnaire_schema(session, steps)

    crypto_tip_pub_key = ''

    # Filter the requested recipients against their encryption readiness.
    receivers = []
    for r in session.query(models.User).filter(models.User.id.in_(request['receivers'])):
        if crypto_is_available:
            if r.crypto_pub_key:
                # This is the regular condition of systems setup on Globaleaks 4
                # Since this version, encryption is enabled by default and
                # users need to perform their first access before they
                # could receive reports.
                receivers.append(r)
            elif encryption.update_date != datetime_null():
                # This is the exceptional condition of systems setup when
                # encryption was implemented via PGP.
                # For continuity reason of those production systems
                # encryption could not be enforced.
                receivers.append(r)
                crypto_is_available = False
        else:
            receivers.append(r)

    if not receivers:
        raise errors.InputValidationError("Unable to deliver the submission to at least one recipient")

    if 0 < context.maximum_selectable_receivers < len(request['receivers']):
        raise errors.InputValidationError("The number of recipients selected exceed the configured limit")

    if crypto_is_available:
        crypto_tip_prv_key, crypto_tip_pub_key = GCE.generate_keypair()

    itip = models.InternalTip()
    itip.tid = tid
    itip.status = 'new'
    itip.crypto_tip_pub_key = crypto_tip_pub_key
    itip.progressive = db_assign_submission_progressive(session, tid)

    if context.tip_timetolive > 0:
        itip.expiration_date = get_expiration(context.tip_timetolive)

    # Evaluate the score level
    itip.total_score = request['total_score']

    # The status https is used to keep track of the security level adopted by the whistleblower
    itip.https = not client_using_tor
    itip.mobile = request['mobile']

    itip.context_id = context.id
    itip.enable_two_way_comments = context.enable_two_way_comments
    itip.enable_two_way_messages = context.enable_two_way_messages
    itip.enable_attachments = context.enable_attachments

    # Locate the whistleblower-identity field of this questionnaire (if any)
    # together with its visibility-authorization attribute.
    x = session.query(models.Field, models.FieldAttr.value) \
               .filter(models.Field.template_id == 'whistleblower_identity',
                       models.Field.step_id == models.Step.id,
                       models.Step.questionnaire_id == context.questionnaire_id,
                       models.FieldAttr.field_id == models.Field.id,
                       models.FieldAttr.name == 'visibility_subject_to_authorization').one_or_none()

    whistleblower_identity = None
    can_access_whistleblower_identity = True

    if x:
        whistleblower_identity = x[0]
        # visibility_subject_to_authorization set => access must be granted
        can_access_whistleblower_identity = not x[1]

    itip.enable_whistleblower_identity = whistleblower_identity is not None

    session.add(itip)
    session.flush()

    # Evaluate if the whistleblower tip should be generated
    if ((not State.tenant_cache[tid].enable_scoring_system) or
        (context.score_threshold_receipt == 0) or
        (context.score_threshold_receipt == 1 and itip.total_score >= 2) or
        (context.score_threshold_receipt == 2 and itip.total_score == 3)):
        receipt = GCE.generate_receipt()
        receipt_salt = State.tenant_cache[tid].receipt_salt
        wbtip = models.WhistleblowerTip()
        wbtip.id = itip.id
        wbtip.tid = tid
        wbtip.hash_alg = 'ARGON2'
        wbtip.receipt_hash = GCE.hash_password(receipt, receipt_salt)

        # Evaluate if the whistleblower tip should be encrypted
        if crypto_is_available:
            crypto_tip_prv_key, itip.crypto_tip_pub_key = GCE.generate_keypair()
            # The whistleblower private key is wrapped with a key derived
            # from the receipt, so only the receipt holder can unlock it.
            wb_key = GCE.derive_key(receipt.encode(), receipt_salt)
            wb_prv_key, wb_pub_key = GCE.generate_keypair()
            wbtip.crypto_prv_key = Base64Encoder.encode(GCE.symmetric_encrypt(wb_key, wb_prv_key))
            wbtip.crypto_pub_key = wb_pub_key
            wbtip.crypto_tip_prv_key = Base64Encoder.encode(GCE.asymmetric_encrypt(wb_pub_key, crypto_tip_prv_key))

        session.add(wbtip)
    else:
        # No receipt: the whistleblower will not be able to access the tip
        receipt = ''

    # Apply special handling to the whistleblower identity question
    if itip.enable_whistleblower_identity and request['identity_provided'] and answers[whistleblower_identity.id]:
        if crypto_is_available:
            wbi = base64.b64encode(GCE.asymmetric_encrypt(itip.crypto_tip_pub_key,
                                                          json.dumps(answers[whistleblower_identity.id][0]).encode())).decode()
        else:
            wbi = answers[whistleblower_identity.id][0]

        # The identity is stripped from the answers and stored separately
        answers[whistleblower_identity.id] = ''

        db_set_internaltip_data(session, itip.id, 'whistleblower_identity', wbi)

    if crypto_is_available:
        answers = base64.b64encode(GCE.asymmetric_encrypt(itip.crypto_tip_pub_key,
                                                          json.dumps(answers, cls=JSONEncoder).encode())).decode()

    db_set_internaltip_answers(session, itip.id, questionnaire_hash, answers)

    for uploaded_file in token.uploaded_files:
        if not itip.enable_attachments:
            break

        if uploaded_file['id'] in request['removed_files']:
            continue

        if crypto_is_available:
            # Encrypt the file metadata with the tip public key
            for k in ['name', 'type', 'size']:
                uploaded_file[k] = base64.b64encode(GCE.asymmetric_encrypt(itip.crypto_tip_pub_key,
                                                                           str(uploaded_file[k])))

        new_file = models.InternalFile()
        new_file.tid = tid
        new_file.name = uploaded_file['name']
        new_file.content_type = uploaded_file['type']
        new_file.size = uploaded_file['size']
        new_file.internaltip_id = itip.id
        new_file.filename = uploaded_file['filename']
        new_file.submission = uploaded_file['submission']
        session.add(new_file)

    for user in receivers:
        if crypto_is_available:
            # Each receiver gets the tip private key wrapped with their own key
            _tip_key = GCE.asymmetric_encrypt(user.crypto_pub_key, crypto_tip_prv_key)
        else:
            _tip_key = b''

        db_create_receivertip(session, user, itip, can_access_whistleblower_identity, _tip_key)

    State.log(tid=tid, type='whistleblower_new_report')

    return {'receipt': receipt, 'score': itip.total_score}
def force_itip_expiration(self, store):
    """Expire all internal tips by setting their expiration date to the epoch."""
    for internaltip in store.find(models.InternalTip):
        internaltip.expiration_date = datetime_null()
def test_datetime_to_pretty_str(self):
    """Both None and the epoch datetime must render as the epoch pretty string."""
    epoch_str = 'Thursday 01 January 1970 00:00 (UTC)'
    self.assertEqual(utility.datetime_to_pretty_str(None), epoch_str)
    self.assertEqual(utility.datetime_to_pretty_str(utility.datetime_null()), epoch_str)
def test_datetime_null(self):
    """datetime_null() must be exactly the Unix epoch."""
    epoch = datetime.utcfromtimestamp(0)
    self.assertEqual(utility.datetime_null(), epoch)
def force_itip_expiration(self, session):
    """Bulk-expire every InternalTip by backdating expiration_date to the epoch."""
    backdate = {'expiration_date': datetime_null()}
    session.query(models.InternalTip).update(backdate)
def force_wbtip_expiration(self, session):
    """Bulk-backdate the whistleblower last access of every tip to the epoch."""
    backdate = {'wb_last_access': datetime_null()}
    session.query(models.InternalTip).update(backdate)
Bool(default=False), 'disable_receiver_notification_emails': Bool(default=False), 'tip_expiration_threshold': Int(validator=natnum_v, default=72), # Hours 'notification_threshold_per_hour': Int(validator=natnum_v, default=20), 'exception_email_address': Unicode(validator=shorttext_v, default=u'*****@*****.**'), 'exception_email_pgp_key_fingerprint': Unicode(default=u''), 'exception_email_pgp_key_public': Unicode(default=u''), 'exception_email_pgp_key_expiration': Unicode(default=iso_strf_time(datetime_null())), }, 'node': { 'name': Unicode(validator=shorttext_v, default=u''), 'basic_auth': Bool(default=False), 'basic_auth_username': Unicode(default=u''), 'basic_auth_password': Unicode(default=u''), 'hostname': Unicode(validator=shorttext_v, default=u''), 'onionservice': Unicode(validators=range_v(22, 22), default=u''), 'tb_download_link':
def pgp_options_parse(receiver, request):
    """
    Apply the PGP options carried by `request` to `receiver`.

    Handles two actions requested by the user or forced by the admin:

    1) a newly proposed PGP key: it is imported to check validity and, if
       valid, stored on the receiver (status 'enabled');
    2) removal of the present key: key material is wiped and the status
       set to 'disabled'.

    The status defaults to u'disabled' and is only re-enabled on a
    successful import. None of these fields are enforced by unicode_keys
    or bool_keys in models.Receiver.

    Further improvement: update the keys using keyserver
    """
    new_pgp_key = request.get('pgp_key_public', None)
    remove_key = request.get('pgp_key_remove', False)

    # the default
    receiver.pgp_key_status = u'disabled'

    if remove_key:
        log.debug("User %s %s request to remove PGP key (%s)" %
                  (receiver.name, receiver.user.username, receiver.pgp_key_fingerprint))
        # In all the cases below, the key is marked disabled as request
        receiver.pgp_key_status = u'disabled'
        receiver.pgp_key_info = None
        receiver.pgp_key_public = None
        receiver.pgp_key_fingerprint = None
        receiver.pgp_key_expiration = datetime_null()

    if new_pgp_key:
        gnob = GLBPGP()

        # try/finally: the GPG scratch environment must be destroyed even
        # when the key fails to load and the exception propagates.
        try:
            result = gnob.load_key(new_pgp_key)

            log.debug("PGP Key imported: %s" % result['fingerprint'])

            receiver.pgp_key_status = u'enabled'
            receiver.pgp_key_info = result['info']
            receiver.pgp_key_public = new_pgp_key
            receiver.pgp_key_fingerprint = result['fingerprint']
            receiver.pgp_key_expiration = result['expiration']
        finally:
            gnob.destroy_environment()
# See smtp_password in private for password 'source_email': Unicode(default='*****@*****.**'), 'security': Unicode(default='TLS'), 'disable_admin_notification_emails': Bool(default=False), 'disable_custodian_notification_emails': Bool(default=False), 'disable_receiver_notification_emails': Bool(default=False), 'tip_expiration_threshold': Int(default=72), # Hours 'notification_threshold_per_hour': Int(default=20), 'exception_email_address': Unicode(default='*****@*****.**'), 'exception_email_pgp_key_fingerprint': Unicode(default=''), 'exception_email_pgp_key_public': Unicode(default=''), 'exception_email_pgp_key_expiration': Unicode(default=iso_strf_time(datetime_null())), }, 'node': { 'name': Unicode(default=''), 'basic_auth': Bool(default=False), 'basic_auth_username': Unicode(default=''), 'basic_auth_password': Unicode(default=''), 'public_site': Unicode(default=''), 'hidden_service': Unicode(default=''), 'tb_download_link': Unicode(default='https://www.torproject.org/download/download'), 'default_language': Unicode(default='en'), # Advanced settings
def initialize_node(store, results, only_node, templates, appdata):
    """
    Seed a fresh database: create the Node, the ApplicationData, the default
    'admin' user and the default Notification settings (including all the
    default mail templates and titles).

    TODO: refactor the email templates together with languages, move the
    template handling into a dedicated function outside the node, and clarify
    the callbacks' existence/usage.

    @param store: the Storm store to add the new objects to
    @param results: unused here (kept for the caller's callback signature)
    @param only_node: the dict used to construct models.Node
    @param templates: dict of default mail template bodies, keyed by name
    @param appdata: dict with 'fields', 'version' and optional node texts
    """
    node = models.Node(only_node)

    log.debug("Inizializing ApplicationData")

    new_appdata = ApplicationData()
    new_appdata.fields = appdata['fields']
    new_appdata.version = appdata['version']
    store.add(new_appdata)

    # optional presentation texts shipped with the appdata bundle
    if 'node_presentation' in appdata:
        node.presentation = appdata['node_presentation']

    if 'node_footer' in appdata:
        node.footer = appdata['node_footer']

    if 'node_subtitle' in appdata:
        node.subtitle = appdata['node_subtitle']

    node.languages_enabled = GLSetting.defaults.languages_enabled
    # random 56-char alphanumeric salt for whistleblower receipts
    node.receipt_salt = get_salt(rstr.xeger('[A-Za-z0-9]{56}'))
    node.wizard_done = GLSetting.skip_wizard
    node.creation_date = datetime_now()
    store.add(node)

    admin_salt = get_salt(rstr.xeger('[A-Za-z0-9]{56}'))
    # default admin password is "globaleaks"; the wizard forces a change
    admin_password = hash_password(u"globaleaks", admin_salt)

    admin_dict = {
        'username': u'admin',
        'password': admin_password,
        'salt': admin_salt,
        'role': u'admin',
        'state': u'enabled',
    }

    admin = models.User(admin_dict)
    # "never logged in" marker
    admin.last_login = datetime_null()
    store.add(admin)

    notification = models.Notification()

    # our defaults for free, because we're like Gandhi of the mail accounts.
    notification.server = "mail.headstrong.de"
    notification.port = 587  # port 587/SMTP-TLS or 465/SMTPS
    # NOTE(review): credentials below look redacted in this copy of the file
    notification.username = "******"
    notification.password = "******"
    notification.security = "TLS"
    notification.source_name = "Default GlobaLeaks sender"
    notification.source_email = notification.username

    # These fields are set as defaults in order to show the Admin the various
    # '%Variables%' usable in the templates. Each is a {language: text} map.
    notification.encrypted_tip_template = {GLSetting.memory_copy.default_language:
                                           templates['encrypted_tip']}
    notification.encrypted_tip_mail_title = {GLSetting.memory_copy.default_language:
                                             "[Tip %TipNum%] for %ReceiverName% in %ContextName%: new Tip (Encrypted)"}
    notification.plaintext_tip_template = {GLSetting.memory_copy.default_language:
                                           templates['plaintext_tip']}
    notification.plaintext_tip_mail_title = {GLSetting.memory_copy.default_language:
                                             "[Tip %TipNum%] for %ReceiverName% in %ContextName%: new ClearText"}
    notification.encrypted_message_template = {GLSetting.memory_copy.default_language:
                                               templates['encrypted_message']}
    notification.encrypted_message_mail_title = {GLSetting.memory_copy.default_language:
                                                 "[Tip %TipNum%] for %ReceiverName% in %ContextName%: New message (Encrypted)"}
    notification.plaintext_message_template = {GLSetting.memory_copy.default_language:
                                               templates['plaintext_message']}
    notification.plaintext_message_mail_title = {GLSetting.memory_copy.default_language:
                                                 "[Tip %TipNum%] for %ReceiverName% in %ContextName%: New message"}
    notification.encrypted_file_template = {GLSetting.memory_copy.default_language:
                                            templates['encrypted_file']}
    notification.encrypted_file_mail_title = {GLSetting.memory_copy.default_language:
                                              "[Tip %TipNum%] for %ReceiverName% in %ContextName%: File attached (Encrypted)"}
    notification.plaintext_file_template = {GLSetting.memory_copy.default_language:
                                            templates['plaintext_file']}
    notification.plaintext_file_mail_title = {GLSetting.memory_copy.default_language:
                                              "[Tip %TipNum%] for %ReceiverName% in %ContextName%: File attached"}
    notification.encrypted_comment_template = {GLSetting.memory_copy.default_language:
                                               templates['encrypted_comment']}
    notification.encrypted_comment_mail_title = {GLSetting.memory_copy.default_language:
                                                 "[Tip %TipNum%] for %ReceiverName% in %ContextName%: New comment (Encrypted)"}
    notification.plaintext_comment_template = {GLSetting.memory_copy.default_language:
                                               templates['plaintext_comment']}
    notification.plaintext_comment_mail_title = {GLSetting.memory_copy.default_language:
                                                 "[Tip %TipNum%] for %ReceiverName% in %ContextName%: New comment"}
    notification.zip_description = {GLSetting.memory_copy.default_language:
                                    templates['zip_collection']}

    store.add(notification)
def test_is_expired(self):
    """is_expired: None and a future date are live; null/now are expired."""
    cases = (
        (None, False),
        (utility.datetime_null(), True),
        (utility.datetime_now(), True),
        (utility.utc_future_date(seconds=1337), False),
    )

    for candidate, expected in cases:
        self.assertEqual(utility.is_expired(candidate), expected)
def force_wbtip_expiration(self, store):
    """Reset the whistleblower last-access date on every InternalTip."""
    null_access_date = datetime_null()
    all_tips = store.find(models.InternalTip)
    all_tips.set(wb_last_access=null_access_date)
def test_is_expired(self):
    """is_expired: null/now dates are expired; 'never' is not."""
    cases = (
        (utility.datetime_null(), True),
        (utility.datetime_now(), True),
        (utility.datetime_never(), False),
    )

    for candidate, expected in cases:
        self.assertEqual(utility.is_expired(candidate), expected)
def login(session, tid, username, password, authcode, client_using_tor, client_ip):
    """
    Authenticate a user and return a new session.

    @param session: an ORM session
    @param tid: the tenant id the login is performed against
    @param username: the provided username
    @param password: the provided cleartext password
    @param authcode: the provided two-factor authentication code ('' if none)
    @param client_using_tor: whether the client connects over Tor
    @param client_ip: the client IP address
    @return: a new session object from Sessions.new
    @raise errors.InvalidAuthentication: on bad credentials
    @raise errors.InvalidTwoFactorAuthCode: on wrong 2FA code
    @raise errors.TwoFactorAuthCodeRequired: when a 2FA code must be supplied
    """
    user = None

    # candidate users: matching username, not disabled, member of this tenant
    users = session.query(User).filter(User.username == username,
                                       User.state != u'disabled',
                                       UserTenant.user_id == User.id,
                                       UserTenant.tenant_id == tid).distinct()
    for u in users:
        if GCE.check_password(u.hash_alg, password, u.salt, u.password):
            user = u
            break

        # Fix for issue: https://github.com/globaleaks/GlobaLeaks/issues/2563
        # On nodes created before 2019-03-05 the stored hash may have been
        # computed on the password wrapped in single quotes; retry that form.
        # FIX: this statement was syntactically broken (mangled/redacted in
        # this copy); restored to the symmetric quote-wrapping of the
        # upstream fix.
        # NOTE(review): tenant_cache is indexed with the literal 1 (root
        # tenant) rather than tid here and below — confirm this is intended.
        if State.tenant_cache[1].creation_date < 1551740400:
            u_password = '\'' + u.password + '\''
            if GCE.check_password(u.hash_alg, password, u.salt, u_password):
                user = u
                break

    if user is None:
        log.debug("Login: Invalid credentials")
        Settings.failed_login_attempts += 1
        raise errors.InvalidAuthentication

    connection_check(client_ip, tid, user.role, client_using_tor)

    # 2FA is enforced only after the first login (last_login already set)
    if State.tenant_cache[1].two_factor_auth and user.last_login != datetime_null():
        token = TwoFactorTokens.get(user.id)

        if token is not None and authcode != '':
            if token.token == authcode:
                TwoFactorTokens.revoke(user.id)
            else:
                raise errors.InvalidTwoFactorAuthCode
        elif token is None and authcode == '':
            # first phase: issue a token, mail it, ask the client for the code
            token = TwoFactorTokens.new(user.id)

            data = {'type': '2fa', 'authcode': str(token.token)}

            data['node'] = db_admin_serialize_node(session, tid, user.language)
            data['notification'] = db_get_notification(session, tid, user.language)

            subject, body = Templating().get_mail_subject_and_body(data)

            State.sendmail(1, user.mail_address, subject, body)
            raise errors.TwoFactorAuthCodeRequired
        else:
            raise errors.TwoFactorAuthCodeRequired

    user.last_login = datetime_now()

    crypto_prv_key = ''
    if State.tenant_cache[1].encryption and user.crypto_prv_key:
        # derive the user key from the password to unlock the private key
        user_key = GCE.derive_key(password.encode('utf-8'), user.salt)
        crypto_prv_key = GCE.symmetric_decrypt(user_key, user.crypto_prv_key)

    return Sessions.new(tid, user.id, user.role, user.password_change_needed, crypto_prv_key)
def migrate_Notification(self):
    """
    Migrate the singleton Notification row to the new schema:
    - install the new identity-access mail templates,
    - initialize columns new in this schema version,
    - move exception-email settings from the old Node row,
    - rename the %ReceiverName% template variable to %RecipientName%,
    - replace the retired default mail server with the new default.
    """
    old_node = self.store_old.find(self.model_from['Node']).one()
    old_notification = self.store_old.find(self.model_from['Notification']).one()
    new_notification = self.model_to['Notification']()

    # templates introduced by this migration, filled from appdata defaults
    new_templates = [
        'identity_access_authorized_mail_template',
        'identity_access_authorized_mail_title',
        'identity_access_denied_mail_template',
        'identity_access_denied_mail_title',
        'identity_access_request_mail_template',
        'identity_access_request_mail_title',
        'identity_provided_mail_template',
        'identity_provided_mail_title'
    ]

    for _, v in new_notification._storm_columns.iteritems():
        # brand-new template columns get their default body/title
        if self.update_model_with_new_templates(new_notification,
                                                v.name,
                                                new_templates,
                                                self.appdata['templates']):
            continue

        if v.name == 'disable_custodian_notification_emails':
            new_notification.disable_custodian_notification_emails = False
            continue

        if v.name == 'disable_receiver_notification_emails':
            # column renamed from the old plural 'receivers' form
            new_notification.disable_receiver_notification_emails = \
                old_notification.disable_receivers_notification_emails
            continue

        if v.name == 'tip_expiration_threshold':
            new_notification.tip_expiration_threshold = 72  # that is the current default
            continue

        if v.name == 'exception_email_address':
            # moved from the Node table into Notification
            new_notification.exception_email_address = old_node.exception_email
            continue

        if v.name == 'exception_email_pgp_key_status':
            new_notification.exception_email_pgp_key_status = 'disabled'
            continue

        if v.name == 'exception_email_pgp_key_info':
            new_notification.exception_email_pgp_key_info = ''
            continue

        if v.name == 'exception_email_pgp_key_fingerprint':
            new_notification.exception_email_pgp_key_fingerprint = ''
            continue

        if v.name == 'exception_email_pgp_key_public':
            new_notification.exception_email_pgp_key_public = ''
            continue

        if v.name == 'exception_email_pgp_key_expiration':
            new_notification.exception_email_pgp_key_expiration = datetime_null()
            continue

        if v.name == 'archive_description':
            # column renamed from zip_description
            new_notification.archive_description = old_notification.zip_description
            continue

        # any other column: copy as-is, renaming the template variable
        # %ReceiverName% -> %RecipientName% in localized texts
        old_value = getattr(old_notification, v.name)
        if v.name in new_notification.localized_keys:
            for lang in old_value:
                old_value[lang] = string.replace(old_value[lang],
                                                 "ReceiverName",
                                                 "RecipientName")

        setattr(new_notification, v.name, old_value)

    if new_notification.server == u'mail.headstrong.de':
        # If the node is still using the default config, update the config to the new one
        new_notification.server = 'demo.globaleaks.org'
        new_notification.port = 9287
        new_notification.username = u'hey_you_should_change_me'
        new_notification.password = u'yes_you_really_should_change_me'
        new_notification.source_email = u'*****@*****.**'

    self.store_new.add(new_notification)
def __init__(self):
    # Test-fixture data: dummy users, receivers, field templates, steps,
    # context, submission and node settings used across the test suite.
    # All values are literal defaults; emails/credentials look redacted
    # in this copy of the file.

    # the User row backing the dummy receiver
    self.dummyReceiverUser = {
        'username': u'*****@*****.**',
        'password': VALID_HASH1,
        'salt': VALID_SALT1,
        'role': u'receiver',
        'state': u'enabled',
        'last_login': datetime_null(),
        'timezone': 0,
        'language': u'en',
        'password_change_needed': False,
        'password_change_date': datetime_null()
    }

    self.dummyReceiver = {
        'password': VALID_PASSWORD1,
        'password_change_needed': False,
        'name': u'Ned Stark',
        'description': u'King MockDummy Receiver',
        # Email can be different from the user, but at the creation time is used
        # the same address, therefore we keep the same of dummyReceiver.username
        'mail_address': self.dummyReceiverUser['username'],
        'ping_mail_address': '*****@*****.**',
        'can_delete_submission': True,
        'can_postpone_expiration': True,
        'contexts': [],
        'tip_notification': True,
        'ping_notification': True,
        'tip_expiration_threshold': 72,
        'pgp_key_info': u'',
        'pgp_key_fingerprint': u'',
        'pgp_key_status': u'disabled',
        'pgp_key_public': u'',
        'pgp_key_expiration': u'',
        'pgp_key_remove': False,
        'presentation_order': 0,
        'timezone': 0,
        'language': u'en',
        'configuration': 'default'
    }

    # same receiver but with a valid PGP public key configured
    self.dummyReceiverPGP = copy.deepcopy(self.dummyReceiver)
    self.dummyReceiverPGP['pgp_key_public'] = VALID_PGP_KEY1

    self.dummyFieldTemplates = [
        {
            'id': u'd4f06ad1-eb7a-4b0d-984f-09373520cce7',
            'is_template': True,
            'template_id': '',
            'step_id': '',
            'fieldgroup_id': '',
            'label': u'Field 222',
            'type': u'inputbox',
            'preview': False,
            'description': u'field description',
            'hint': u'field hint',
            'multi_entry': False,
            'multi_entry_hint': '',
            'stats_enabled': False,
            'required': True,  # <- first field is special,
            'children': {},    # it's marked as required!!!
            'attrs': {},
            'options': [],
            'y': 2,
            'x': 0
        },
        {
            'id': u'c4572574-6e6b-4d86-9a2a-ba2e9221467d',
            'is_template': True,
            'template_id': '',
            'step_id': '',
            'fieldgroup_id': '',
            'label': u'Field 2',
            'type': u'inputbox',
            'preview': False,
            'description': 'description',
            'hint': u'field hint',
            'multi_entry': False,
            'multi_entry_hint': '',
            'stats_enabled': False,
            'required': False,
            'children': {},
            'attrs': {},
            'options': [],
            'y': 3,
            'x': 0
        },
        {
            'id': u'6a6e9282-15e8-47cd-9cc6-35fd40a4a58f',
            'is_template': True,
            'step_id': '',
            'template_id': '',
            'fieldgroup_id': '',
            'label': u'Generalities',
            'type': u'fieldgroup',
            'preview': False,
            'description': u'field description',
            'hint': u'field hint',
            'multi_entry': False,
            'multi_entry_hint': '',
            'stats_enabled': False,
            'required': False,
            'children': {},
            'attrs': {},
            'options': [],
            'y': 4,
            'x': 0
        },
        {
            'id': u'7459abe3-52c9-4a7a-8d48-cabe3ffd2abd',
            'is_template': True,
            'template_id': '',
            'step_id': '',
            'fieldgroup_id': '',
            'label': u'Name',
            'type': u'inputbox',
            'preview': False,
            'description': u'field description',
            'hint': u'field hint',
            'multi_entry': False,
            'multi_entry_hint': '',
            'stats_enabled': False,
            'required': False,
            'children': {},
            'attrs': {},
            'options': [],
            'y': 0,
            'x': 0
        },
        {
            'id': u'de1f0cf8-63a7-4ed8-bc5d-7cf0e5a2aec2',
            'is_template': True,
            'template_id': '',
            'step_id': '',
            'fieldgroup_id': '',
            'label': u'Surname',
            'type': u'inputbox',
            'preview': False,
            'description': u'field description',
            'hint': u'field hint',
            'multi_entry': False,
            'multi_entry_hint': '',
            'stats_enabled': False,
            'required': False,
            'children': {},
            'attrs': {},
            'options': [],
            'y': 0,
            'x': 0
        },
        {
            'id': u'7e1f0cf8-63a7-4ed8-bc5d-7cf0e5a2aec2',
            'is_template': True,
            'template_id': '',
            'step_id': '',
            'fieldgroup_id': '',
            'label': u'Gender',
            'type': u'selectbox',
            'preview': False,
            'description': u'field description',
            'hint': u'field hint',
            'multi_entry': False,
            'multi_entry_hint': '',
            'stats_enabled': False,
            'required': False,
            'children': {},
            'attrs': {},
            # selectbox options with score/activation metadata
            'options': [
                {
                    'id': '2ebf6df8-289a-4f17-aa59-329fe11d232e',
                    'label': 'Male',
                    'value': '',
                    'presentation_order': 0,
                    'score_points': 0,
                    'activated_fields': []
                },
                {
                    'id': '9c7f343b-ed46-4c9e-9121-a54b6e310123',
                    'label': 'Female',
                    'value': '',
                    'presentation_order': 0,
                    'score_points': 0,
                    'activated_fields': []
                }
            ],
            'y': 0,
            'x': 0
        }
    ]

    # concrete fields are just a deep copy of the templates
    self.dummyFields = copy.deepcopy(self.dummyFieldTemplates)

    self.dummySteps = [
        {
            'label': u'Step 1',
            'description': u'Step Description',
            'hint': u'Step Hint',
            'presentation_order': 0,
            'children': []
        },
        {
            'label': u'Step 2',
            'description': u'Step Description',
            'hint': u'Step Hint',
            'presentation_order': 1,
            'children': []
        }
    ]

    self.dummyContext = {
        # localized stuff
        'name': u'Already localized name',
        'description': u'Already localized desc',
        'steps': [],
        'select_all_receivers': True,
        # tip_timetolive is expressed in days
        'tip_timetolive': 20,
        'receivers': [],
        'maximum_selectable_receivers': 0,
        'show_small_cards': False,
        'show_receivers': False,
        'enable_comments': True,
        'enable_private_messages': True,
        'presentation_order': 0,
        'show_receivers_in_alphabetical_order': False,
        'steps_arrangement': 'horizontal',
        'reset_steps': False
    }

    self.dummySubmission = {
        'context_id': '',
        'answers': {},
        'receivers': [],
        'files': []
    }

    self.dummyNode = {
        'name': u"Please, set me: name/title",
        'description': u"Pleæs€, set m€: d€scription",
        'presentation': u'This is whæt æpp€ærs on top',
        'footer': u'check it out https://www.youtube.com/franksentus ;)',
        'security_awareness_title': u'',
        'security_awareness_text': u'',
        'whistleblowing_question': u'',
        'whistleblowing_button': u'',
        'hidden_service': u"http://1234567890123456.onion",
        'public_site': u"https://globaleaks.org",
        'email': u"*****@*****.**",
        'languages_supported': [],  # ignored
        'languages_enabled': ["it", "en"],
        'password': '',
        'old_password': '',
        'salt': 'OMG!, the Rains of Castamere ;( ;(',
        'salt_receipt': '<<the Lannisters send their regards>>',
        'maximum_filesize': GLSettings.defaults.maximum_filesize,
        'maximum_namesize': GLSettings.defaults.maximum_namesize,
        'maximum_textsize': GLSettings.defaults.maximum_textsize,
        'tor2web_admin': True,
        'tor2web_submission': True,
        'tor2web_receiver': True,
        'tor2web_unauth': True,
        'can_postpone_expiration': False,
        'can_delete_submission': False,
        'exception_email': GLSettings.defaults.exception_email,
        'ahmia': False,
        'allow_unencrypted': True,
        'allow_iframes_inclusion': False,
        'send_email_for_every_event': False,
        'configured': False,
        'wizard_done': False,
        'custom_homepage': False,
        'disable_privacy_badge': False,
        'disable_security_awareness_badge': False,
        'disable_security_awareness_questions': False,
        'disable_key_code_hint': False,
        'default_timezone': 0,
        'default_language': u'en',
        'admin_timezone': 0,
        'admin_language': u'en',
        'enable_custom_privacy_badge': False,
        'custom_privacy_badge_tor': u'',
        'custom_privacy_badge_none': u'',
        'header_title_homepage': u'',
        'header_title_submissionpage': u'',
        'header_title_receiptpage': u'',
        'landing_page': u'homepage',
        'context_selector_label': u'',
        'show_contexts_in_alphabetical_order': False,
        'submission_minimum_delay': 123,
        'submission_maximum_ttl': 1111,
    }
class BaseHandler(RequestHandler):
    """
    Base cyclone RequestHandler for all GlobaLeaks API handlers.

    Adds: security response headers, JSON message validation against
    templates, per-request timing analysis, optional verbose HTTP I/O
    logging, file upload/download helpers and session helpers.
    """
    # seconds a handler may run before an exception email is sent
    handler_exec_time_threshold = HANDLER_EXEC_TIME_THRESHOLD
    # class-level placeholder; overwritten per-request in
    # handler_time_analysis_begin with time.time()
    start_time = datetime_null()

    def __init__(self, application, request, **kwargs):
        super(BaseHandler, self).__init__(application, request, **kwargs)

        self.name = type(self).__name__

        # global request counter, used as this request's id
        self.req_id = GLSettings.http_requests_counter
        GLSettings.http_requests_counter += 1

        self.handler_time_analysis_begin()
        self.handler_request_logging_begin()

    def set_default_headers(self):
        """
        Apply security-related response headers (server-version hiding,
        XSS/clickjacking/cache mitigations) and resolve the request
        language. This is the first function called when a new request
        reaches GLB.
        """
        self.request.start_time = datetime_now()

        # to avoid version attacks
        self.set_header("Server", "globaleaks")

        # to reduce possibility for XSS attacks.
        self.set_header("X-Content-Type-Options", "nosniff")
        self.set_header("X-XSS-Protection", "1; mode=block")

        # to mitigate information leakage on Browser/Proxy Cache
        self.set_header("Cache-control", "no-cache, no-store, must-revalidate")
        self.set_header("Pragma", "no-cache")
        self.set_header("Expires", "-1")

        # to avoid information leakage via referrer
        self.set_header("Content-Security-Policy", "referrer no-referrer")

        # to avoid Robots spidering, indexing, caching
        self.set_header("X-Robots-Tag", "noindex")

        # to mitigate clickjaking attacks on iframes allowing only same origin
        # same origin is needed in order to include svg and other html <object>
        if not GLSettings.memory_copy.allow_iframes_inclusion:
            self.set_header("X-Frame-Options", "sameorigin")

        lang = self.request.headers.get('GL-Language', None)

        if not lang:
            # before was used the Client language, but it shall be unsupported
            # lang = self.request.headers.get('Accepted-Language', None)
            lang = GLSettings.memory_copy.default_language

        self.request.language = lang

    @staticmethod
    def validate_python_type(value, python_type):
        """
        Return True if the value instantiates the given python_type.
        'int' fields are accepted also as 'unicode' (validated by a base-10
        cast); 'bool' fields are accepted also as 'true'/'false' strings
        because this happens with angular.js clients.
        """
        if value is None:
            return True

        if python_type == int:
            try:
                int(value)
                return True
            except Exception:
                return False

        if python_type == bool:
            if value == u'true' or value == u'false':
                return True

        return isinstance(value, python_type)

    @staticmethod
    def validate_GLtype(value, gl_type):
        """
        Return True if the string value matches the given regexp gl_type.
        """
        if isinstance(value, (str, unicode)):
            return bool(re.match(gl_type, value))
        else:
            return False

    @staticmethod
    def validate_type(value, gl_type):
        """
        Dispatch validation by the shape of gl_type: a callable is treated
        as a primitive python type, a mapping as a nested message template,
        a str as a regexp, any other iterable as "list of gl_type[0]".
        """
        # if it's callable, than assumes is a primitive class
        if callable(gl_type):
            retval = BaseHandler.validate_python_type(value, gl_type)
            if not retval:
                log.err("-- Invalid python_type, in [%s] expected %s" % (value, gl_type))
            return retval
        # value as "{foo:bar}"
        elif isinstance(gl_type, collections.Mapping):
            retval = BaseHandler.validate_jmessage(value, gl_type)
            if not retval:
                log.err("-- Invalid JSON/dict [%s] expected %s" % (value, gl_type))
            return retval
        # regexp
        elif isinstance(gl_type, str):
            retval = BaseHandler.validate_GLtype(value, gl_type)
            if not retval:
                log.err("-- Failed Match in regexp [%s] against %s" % (value, gl_type))
            return retval
        # value as "[ type ]"
        elif isinstance(gl_type, collections.Iterable):
            # empty list is ok
            if len(value) == 0:
                return True
            else:
                retval = all(BaseHandler.validate_type(x, gl_type[0]) for x in value)
                if not retval:
                    log.err("-- List validation failed [%s] of %s" % (value, gl_type))
                return retval
        else:
            raise AssertionError

    @staticmethod
    def validate_jmessage(jmessage, message_template):
        """
        Takes a string that represents a JSON message and checks to see if it
        conforms to the message type it is supposed to be.

        This message must be either a dict or a list. This function may be
        called recursively to validate sub-parameters that are also GLType.

        message: the message string that should be validated
        message_type: the GLType class it should match.
        """
        if isinstance(message_template, dict):
            success_check = 0
            keys_to_strip = []
            for key, value in jmessage.iteritems():
                if key not in message_template:
                    # strip whatever is not validated
                    #
                    # reminder: it's not possible to raise an exception
                    # in case more values are present because it's normal that
                    # the client will send automatically more data.
                    #
                    # e.g. the client will always send 'creation_date' attributes
                    # of objects and attributes like this are present generally
                    # only from the second request on.
                    #
                    keys_to_strip.append(key)
                    continue

                if not BaseHandler.validate_type(value, message_template[key]):
                    log.err("Received key %s: type validation fail " % key)
                    raise errors.InvalidInputFormat("Key (%s) type validation failure" % key)
                success_check += 1

            for key in keys_to_strip:
                del jmessage[key]

            for key, value in message_template.iteritems():
                if key not in jmessage.keys():
                    log.debug("Key %s expected but missing!" % key)
                    log.debug("Received schema %s - Expected %s" %
                              (jmessage.keys(), message_template.keys()))
                    raise errors.InvalidInputFormat("Missing key %s" % key)

                if not BaseHandler.validate_type(jmessage[key], value):
                    log.err("Expected key: %s type validation failure" % key)
                    raise errors.InvalidInputFormat("Key (%s) double validation failure" % key)
                success_check += 1

            # every key must have been validated twice (client side + template side)
            if success_check == len(message_template.keys()) * 2:
                return True
            else:
                log.err("Success counter double check failure: %d" % success_check)
                raise errors.InvalidInputFormat("Success counter double check failure")

        elif isinstance(message_template, list):
            ret = all(BaseHandler.validate_type(x, message_template[0]) for x in jmessage)
            if not ret:
                raise errors.InvalidInputFormat("Not every element in %s is %s" %
                                                (jmessage, message_template[0]))
            return True

        else:
            raise errors.InvalidInputFormat("invalid json massage: expected dict or list")

    @staticmethod
    def validate_message(message, message_template):
        """Parse a JSON string and validate it against message_template."""
        try:
            jmessage = json.loads(message)
        except ValueError:
            raise errors.InvalidInputFormat("Invalid JSON format")

        if BaseHandler.validate_jmessage(jmessage, message_template):
            return jmessage

        raise errors.InvalidInputFormat("Unexpected condition!?")

    def output_stripping(self, message, message_template):
        """
        @param message: the serialized dict received
        @param message_template: the answers definition
        @return: a dict or a list without the unwanted keys
        """
        # NOTE(review): not implemented; callers currently get None
        pass

    def on_connection_close(self, *args, **kwargs):
        pass

    def prepare(self):
        """
        Here is implemented:
          - The performance analysts
          - the Request/Response logging
        """
        if not validate_host(self.request.host):
            raise errors.InvalidHostSpecified

    def on_finish(self):
        """
        Here is implemented:
          - The performance analysts
          - the Request/Response logging
        """
        # record monitored outcoming events (status/method/uri matchers)
        for event in outcoming_event_monitored:
            if event['status_checker'](self._status_code) and \
                    event['method'] == self.request.method and \
                    event['handler_check'](self.request.uri):
                EventTrack(event, self.request.request_time())

        self.handler_time_analysis_end()
        self.handler_request_logging_end()

    def do_verbose_log(self, content):
        """
        Record in the verbose log the content as defined by Cyclone wrappers.

        This option is only available in devel mode and intentionally does not
        filter any input/output; it should be used only for debug purposes.
        """
        try:
            with open(GLSettings.httplogfile, 'a+') as fd:
                fdesc.writeToFD(fd.fileno(), content + "\n")
        except Exception as excep:
            log.err("Unable to open %s: %s" % (GLSettings.httplogfile, excep))

    def write_file(self, filepath):
        # NOTE(review): `or` here means a missing path only returns when
        # BOTH checks fail — confirm `and` was not intended.
        if not (os.path.exists(filepath) or os.path.isfile(filepath)):
            return

        try:
            with open(filepath, "rb") as f:
                # stream the file to the client in fixed-size chunks
                while True:
                    chunk = f.read(GLSettings.file_chunk_size)
                    if len(chunk) == 0:
                        break
                    self.write(chunk)
        except IOError as srcerr:
            log.err("Unable to open %s: %s " % (filepath, srcerr.strerror))

    def write_error(self, status_code, **kw):
        """Serialize GLB application errors as a structured JSON body."""
        exception = kw.get('exception')
        if exception and hasattr(exception, 'error_code'):
            error_dict = {}
            error_dict.update({'error_message': exception.reason,
                               'error_code': exception.error_code})
            if hasattr(exception, 'arguments'):
                error_dict.update({'arguments': exception.arguments})
            else:
                error_dict.update({'arguments': []})

            self.set_status(status_code)
            self.finish(error_dict)
        else:
            RequestHandler.write_error(self, status_code, **kw)

    def write(self, chunk):
        """
        This is a monkey patch to RequestHandler to allow us to serialize
        also json list objects.
        """
        if isinstance(chunk, types.ListType):
            chunk = escape.json_encode(chunk)
            RequestHandler.write(self, chunk)
            self.set_header("Content-Type", "application/json")
        else:
            RequestHandler.write(self, chunk)

    @inlineCallbacks
    def uniform_answers_delay(self):
        """
        @return: nothing. just put a delay to normalize a minimum
                 amount of time used by requests. this impairs time
                 execution analysis.

        This safety measure, able to counteract some side channel attacks,
        is automatically disabled when the options -z and -l DEBUG are
        present (because that means globaleaks is run in development mode).
        """
        if GLSettings.loglevel == logging.DEBUG and GLSettings.devel_mode:
            return

        uniform_delay = GLSettings.delay_threshold  # default 0.800
        request_time = self.request.request_time()
        needed_diff = uniform_delay - request_time

        if needed_diff > 0:
            yield deferred_sleep(needed_diff)

    @property
    def current_user(self):
        # session id from header, or POST-based authentication fallback
        session_id = self.request.headers.get('X-Session')

        if session_id is None:
            # Check for POST based authentication.
            session_id = self.get_argument('x-session', default=None)

        if session_id is None:
            return None

        try:
            session = GLSettings.sessions[session_id]
        except KeyError:
            return None
        return session

    @property
    def is_whistleblower(self):
        if not self.current_user or 'role' not in self.current_user:
            raise errors.NotAuthenticated

        return self.current_user['role'] == 'wb'

    @property
    def is_receiver(self):
        if not self.current_user or 'role' not in self.current_user:
            raise errors.NotAuthenticated

        return self.current_user['role'] == 'receiver'

    def get_file_upload(self):
        """
        Accumulate flow.js chunked uploads into a GLSecureTemporaryFile;
        return the uploaded-file descriptor dict on the last chunk,
        otherwise None.
        """
        try:
            if (int(self.request.arguments['flowTotalSize'][0]) / (1024 * 1024)) > GLSettings.memory_copy.maximum_filesize:
                log.err("File upload request rejected: file too big")
                raise errors.FileTooBig(GLSettings.memory_copy.maximum_filesize)

            if self.request.arguments['flowIdentifier'][0] not in GLUploads:
                f = GLSecureTemporaryFile(GLSettings.tmp_upload_path)
                GLUploads[self.request.arguments['flowIdentifier'][0]] = f
            else:
                f = GLUploads[self.request.arguments['flowIdentifier'][0]]

            f.write(self.request.files['file'][0]['body'])

            # not the last chunk yet: nothing to return
            if self.request.arguments['flowChunkNumber'][0] != self.request.arguments['flowTotalChunks'][0]:
                return None

            uploaded_file = {}
            uploaded_file['filename'] = self.request.files['file'][0]['filename']
            uploaded_file['content_type'] = self.request.files['file'][0]['content_type']
            uploaded_file['body_len'] = int(self.request.arguments['flowTotalSize'][0])
            uploaded_file['body_filepath'] = f.filepath
            uploaded_file['body'] = f

            return uploaded_file
        except errors.FileTooBig:
            raise  # propagate the exception
        except Exception as exc:
            log.err("Error while handling file upload %s" % exc)
            return None

    def _handle_request_exception(self, e):
        """Log HTTP errors; mail uncaught exceptions; emit the error page."""
        if isinstance(e, Failure):
            exc_type = e.type
            exc_value = e.value
            exc_tb = e.getTracebackObject()
            e = e.value
        else:
            exc_type, exc_value, exc_tb = sys.exc_info()

        if isinstance(e, (HTTPError, HTTPAuthenticationRequired)):
            if GLSettings.http_log and e.log_message:
                string_format = "%d %s: " + e.log_message
                args = [e.status_code, self._request_summary()] + list(e.args)
                msg = lambda *args: string_format % args
                log.msg(msg(*args))

            if e.status_code not in httplib.responses:
                log.msg("Bad HTTP status code: %d" % e.status_code)
                return self.send_error(500, exception=e)
            else:
                return self.send_error(e.status_code, exception=e)
        else:
            log.err("Uncaught exception %s %s %s" % (exc_type, exc_value, exc_tb))
            if GLSettings.http_log:
                log.msg(e)
            mail_exception_handler(exc_type, exc_value, exc_tb)
            return self.send_error(500, exception=e)

    def handler_time_analysis_begin(self):
        self.start_time = time.time()

    def handler_time_analysis_end(self):
        # alert by email if the handler ran longer than the threshold
        current_run_time = time.time() - self.start_time

        if current_run_time > self.handler_exec_time_threshold:
            error = "Handler [%s] exceeded exec threshold with an execution time of %.2f seconds" % (self.name, current_run_time)
            log.err(error)
            send_exception_email(error)

    def handler_request_logging_begin(self):
        if GLSettings.devel_mode and GLSettings.http_log >= 0:
            try:
                content = (">" * 15)
                content += (" Request %d " % GLSettings.http_requests_counter)
                content += (">" * 15) + "\n\n"

                content += self.request.method + " " + self.request.full_url() + "\n\n"

                content += "headers:\n"
                for k, v in self.request.headers.get_all():
                    content += "%s: %s\n" % (k, v)

                if type(self.request.body) == dict and 'body' in self.request.body:
                    # this is needed due to cyclone hack for file uploads
                    body = self.request.body['body'].read()
                else:
                    body = self.request.body

                if len(body):
                    content += "\nbody:\n" + body + "\n"

                self.do_verbose_log(content)
            except Exception as excep:
                log.err("JSON logging fail (prepare): %s" % excep.message)
                return

            # stop I/O logging once the configured request limit is reached
            if 0 < GLSettings.http_log < GLSettings.http_requests_counter:
                log.debug("Reached I/O logging limit of %d requests: disabling" %
                          GLSettings.http_log)
                GLSettings.http_log = -1

    def handler_request_logging_end(self):
        if GLSettings.devel_mode and GLSettings.http_log >= 0:
            try:
                content = ("<" * 15)
                content += (" Response %d " % self.req_id)
                content += ("<" * 15) + "\n\n"

                content += "status code: " + str(self._status_code) + "\n\n"

                content += "headers:\n"
                for k, v in self._headers.iteritems():
                    content += "%s: %s\n" % (k, v)

                if self._write_buffer is not None:
                    content += "\nbody: " + str(self._write_buffer) + "\n"

                self.do_verbose_log(content)
            except Exception as excep:
                log.err("HTTP Requests/Responses logging fail (end): %s" % excep.message)