def operation(self):
    """
    Maintain the connection to the local Tor control socket and, once
    connected, (re)publish all configured hidden services.

    Returns a Deferred that fires when the Tor connection is lost or
    could not be established, so the caller can schedule a retry.
    """
    restart_deferred = defer.Deferred()

    control_socket = '/var/run/tor/control'

    self.reset()

    def startup_callback(tor_conn):
        # Re-arm error reporting so the next failure is logged again.
        self.print_startup_error = True
        self.tor_conn = tor_conn

        # Firing restart_deferred on disconnect makes the caller retry.
        self.tor_conn.protocol.on_disconnect = restart_deferred

        # FIX: success message was logged through log.err(); use log.debug()
        # as the sibling implementation does.
        log.debug('Successfully connected to Tor control port')

        return self.add_all_hidden_services()

    def startup_errback(err):
        if self.print_startup_error:
            # Print error only on first run or failure or on a failure subsequent to a success condition
            self.print_startup_error = False
            log.err('Failed to initialize Tor connection; error: %s', err)

        restart_deferred.callback(None)

    # Short-circuit with a 1s backoff while Tor is not yet reachable.
    if not os.path.exists(control_socket):
        startup_errback(Exception('Tor control port not open on %s; waiting for Tor to become available' % control_socket))
        return deferred_sleep(1)

    if not os.access(control_socket, os.R_OK):
        startup_errback(Exception('Unable to access %s; manual permission recheck needed' % control_socket))
        return deferred_sleep(1)

    build_local_tor_connection(reactor).addCallbacks(startup_callback, startup_errback)

    return restart_deferred
def service(self, restart_deferred):
    """
    Connect to the local Tor control socket and publish the configured
    onion (hidden) service, polling until the control socket exists and
    is readable.

    :param restart_deferred: Deferred fired when the Tor connection is
        lost or fails to initialize, so the caller can restart.
    """
    hostname, key = yield get_onion_service_info()

    control_socket = '/var/run/tor/control'

    def startup_callback(tor_conn):
        # Re-arm startup error reporting after a successful connection.
        self.print_startup_error = True

        # Firing restart_deferred on disconnect makes the caller restart us.
        tor_conn.protocol.on_disconnect = restart_deferred

        log.debug('Successfully connected to Tor control port')

        # Map public virtual port 80 onto the local backend at 8082.
        hs_loc = ('80 localhost:8082')
        if hostname == '' and key == '':
            log.info('Creating new onion service')
            ephs = EphemeralHiddenService(hs_loc)
        else:
            log.info('Setting up existing onion service %s', hostname)
            ephs = EphemeralHiddenService(hs_loc, key)

        @inlineCallbacks
        def initialization_callback(ret):
            log.info('Initialization of hidden-service %s completed.', ephs.hostname)
            # Persist freshly generated credentials on first run only.
            if hostname == '' and key == '':
                yield set_onion_service_info(ephs.hostname, ephs.private_key)

        d = ephs.add_to_tor(tor_conn.protocol)
        d.addCallback(initialization_callback)  # pylint: disable=no-member

    def startup_errback(err):
        if self.print_startup_error:
            # Print error only on first run or failure or on a failure subsequent to a success condition
            self.print_startup_error = False
            log.err('Failed to initialize Tor connection; error: %s', err)

        restart_deferred.callback(None)

    # Poll until Tor exposes its control socket ...
    if not os.path.exists(control_socket):
        log.err('Tor control port not open on /var/run/tor/control; waiting for Tor to become available')
        while not os.path.exists(control_socket):
            yield deferred_sleep(1)

    # ... and until this process is allowed to read it.
    if not os.access(control_socket, os.R_OK):
        log.err('Unable to access /var/run/tor/control; manual permission recheck needed')
        while not os.access(control_socket, os.R_OK):
            yield deferred_sleep(1)

    d = build_local_tor_connection(reactor)
    d.addCallback(startup_callback)
    d.addErrback(startup_errback)
def operation(self):
    """
    Maintain the connection to the local Tor control socket, probe the
    Tor version to pick the onion service version, then (re)publish all
    configured hidden services.

    Returns a Deferred that fires when the Tor connection is lost or
    could not be established, so the caller can schedule a retry.
    """
    restart_deferred = defer.Deferred()

    control_socket = '/var/run/tor/control'

    self.reset()

    @defer.inlineCallbacks
    def startup_callback(tor_conn):
        self.print_startup_error = True
        self.tor_conn = tor_conn
        self.tor_conn.protocol.on_disconnect = restart_deferred

        # FIX: success message was logged through log.err(); use log.debug().
        log.debug('Successfully connected to Tor control port')

        try:
            version = yield self.tor_conn.protocol.queue_command(
                "GETINFO version")
            version = version.split('=')[1]
            # Tor < 0.3.3.9 lacks stable v3 onion support; fall back to v2.
            if LooseVersion(version) < LooseVersion('0.3.3.9'):
                self.onion_service_version = 2
        except Exception:
            # FIX: bare `except:` narrowed to Exception; the version probe
            # remains deliberately best-effort.
            pass

        yield self.add_all_hidden_services()

    def startup_errback(err):
        if self.print_startup_error:
            # Print error only on first run or failure or on a failure subsequent to a success condition
            self.print_startup_error = False
            log.err('Failed to initialize Tor connection; error: %s', err)

        restart_deferred.callback(None)

    # Short-circuit with a 1s backoff while Tor is not yet reachable.
    if not os.path.exists(control_socket):
        startup_errback(
            Exception(
                'Tor control port not open on %s; waiting for Tor to become available' % control_socket))
        return deferred_sleep(1)

    if not os.access(control_socket, os.R_OK):
        startup_errback(
            Exception(
                'Unable to access %s; manual permission recheck needed' % control_socket))
        return deferred_sleep(1)

    build_local_tor_connection(reactor).addCallbacks(
        startup_callback, startup_errback)

    return restart_deferred
def post(self):
    """
    Authenticate with username/password (plus authcode), possibly on
    behalf of a different tenant; cross-tenant logins yield a redirect.
    """
    req = self.validate_message(self.request.content.read(), requests.AuthDesc)

    # Anti-bruteforce: optional randomized pause before processing.
    pause = random_login_delay()
    if pause:
        yield deferred_sleep(pause)

    tid = int(req['tid'])
    tid = self.request.tid if tid == 0 else tid

    session = yield login(tid,
                          req['username'],
                          req['password'],
                          req['authcode'],
                          self.request.client_using_tor,
                          self.request.client_ip)

    log.debug("Login: Success (%s)" % session.user_role)

    if tid == self.request.tid:
        returnValue(session.serialize())

    # Cross-tenant login: hand the session over via redirect.
    returnValue({
        'redirect': 'https://%s/#/login?token=%s' % (State.tenant_cache[tid].hostname, session.id)
    })
def uniform_answers_delay(self):
    """
    Pad the response so every request takes at least
    GLSetting.delay_threshold seconds, impairing timing analysis and
    counteracting some side-channel attacks.

    The guard is disabled in development mode (options -z and -l DEBUG),
    since that means GlobaLeaks is being run by a developer.
    """
    if GLSetting.loglevel == logging.DEBUG and GLSetting.devel_mode:
        return

    uniform_delay = GLSetting.delay_threshold  # default 0.800 seconds
    request_time = self.request.request_time()
    needed_diff = uniform_delay - request_time

    # FIX: removed commented-out debug prints and the empty `else: pass`.
    if needed_diff > 0:
        yield deferred_sleep(needed_diff)
def post(self):
    """Authenticate a user (username/password + token) on the current tenant."""
    req = self.validate_message(self.request.content.read(), requests.AuthDesc)

    # Anti-bruteforce randomized pause.
    pause = random_login_delay()
    if pause:
        yield deferred_sleep(pause)

    user_id, status, role, pcn = yield login(self.request.tid,
                                             req['username'],
                                             req['password'],
                                             self.request.client_using_tor,
                                             self.request.client_ip,
                                             req['token'])

    session = new_session(self.request.tid, user_id, role, status)

    returnValue({
        'session_id': session.id,
        'role': session.user_role,
        'user_id': session.user_id,
        'session_expiration': int(session.getTime()),
        'status': session.user_status,
        'password_change_needed': pcn
    })
def post(self, rtip_id):
    """Stream a ZIP export of the tip to the receiver as an attachment."""
    tip_export = yield get_tip_export(self.current_user.user_id, rtip_id, self.request.language)

    self.set_header('X-Download-Options', 'noopen')
    self.set_header('Content-Type', 'application/octet-stream')
    self.set_header('Content-Disposition', 'attachment; filename=\"%s.zip\"' % tip_export['tip']['sequence_number'])

    self.zip_stream = iter(ZipStream(tip_export['files']))

    def zip_chunk():
        # Accumulate zip data up to the configured chunk size.
        # Runs in a worker thread via deferToThread.
        parts = []
        total = 0
        for piece in self.zip_stream:
            if len(piece):
                total += len(piece)
                parts.append(piece)
                if total >= GLSettings.file_chunk_size:
                    break
        return ''.join(parts)

    chunk = yield threads.deferToThread(zip_chunk)
    while len(chunk):
        self.write(chunk)
        self.flush()
        # Brief pause so the reactor can service other clients.
        yield deferred_sleep(0.01)
        chunk = yield threads.deferToThread(zip_chunk)
def get(self, rtip_id):
    """Stream a ZIP export of the tip to the receiver as an attachment."""
    tip_export = yield get_tip_export(self.current_user.user_id, rtip_id, self.request.language)

    self.set_header('X-Download-Options', 'noopen')
    self.set_header('Content-Type', 'application/octet-stream')
    self.set_header('Content-Disposition', 'attachment; filename=\"%s.zip\"' % tip_export['tip']['sequence_number'])

    self.zip_stream = iter(ZipStream(tip_export['files']))

    def zip_chunk():
        # Collect zip data until the configured chunk size is reached;
        # executed in a worker thread so zipping never blocks the reactor.
        buf = []
        buffered = 0
        for data in self.zip_stream:
            if not len(data):
                continue
            buffered += len(data)
            buf.append(data)
            if buffered >= GLSettings.file_chunk_size:
                break
        return ''.join(buf)

    chunk = yield threads.deferToThread(zip_chunk)
    while len(chunk):
        self.write(chunk)
        self.flush()
        # Yield briefly between chunks to keep the event loop responsive.
        yield deferred_sleep(0.01)
        chunk = yield threads.deferToThread(zip_chunk)
def post(self):
    """Authenticate via a pre-issued session token (multi-tenant aware)."""
    req = self.validate_message(self.request.content.read(), requests.TokenAuthDesc)

    tid = int(req['tid'])
    if tid == 0:
        tid = self.request.tid

    pause = random_login_delay()
    if pause:
        yield deferred_sleep(pause)

    session = Sessions.get(req['token'])
    if session is None or session.tid != tid:
        # Count the failure so the anti-bruteforce delay grows.
        Settings.failed_login_attempts += 1
        raise errors.InvalidAuthentication

    connection_check(self.request.client_ip, tid, session.user_role, self.request.client_using_tor)

    # Rotate the session id on successful re-authentication.
    session = Sessions.regenerate(session.id)

    log.debug("Login: Success (%s)" % session.user_role)

    if tid == self.request.tid:
        returnValue(session.serialize())

    # Cross-tenant login: hand the session over via redirect.
    returnValue({
        'redirect': 'https://%s/#/login?token=%s' % (State.tenant_cache[tid].hostname, session.id)
    })
def post(self):
    """
    Receipt login handler used by whistleblowers
    """
    req = self.validate_message(self.request.content.read(), requests.ReceiptAuthDesc)

    pause = random_login_delay()
    if pause:
        yield deferred_sleep(pause)

    user_id = yield login_whistleblower(req['receipt'], self.request.client_using_tor)

    # A whistleblower may hold at most one active session.
    GLSessions.revoke_all_sessions(user_id)

    session = GLSession(user_id, 'whistleblower', 'Enabled')

    returnValue({
        'session_id': session.id,
        'role': session.user_role,
        'user_id': session.user_id,
        'session_expiration': int(session.getTime())
    })
def post(self):
    """
    Login
    """
    req = self.validate_message(self.request.body, requests.ReceiptAuthDesc)

    # Anti-bruteforce randomized pause.
    pause = random_login_delay()
    if pause:
        yield utility.deferred_sleep(pause)

    tor2web = get_tor2web_header(self.request.headers)

    user_id = yield login_whistleblower(req['receipt'], tor2web)

    # Equalize total answer time against timing side channels.
    yield self.uniform_answers_delay()

    session = self.generate_session(user_id, 'whistleblower', 'Enabled')

    self.write({
        'role': 'whistleblower',
        'session_id': session.id,
        'user_id': session.user_id,
        'session_expiration': int(GLSettings.sessions[session.id].getTime())
    })
def post(self):
    """
    Login
    """
    req = self.validate_message(self.request.body, requests.AuthDesc)

    # Anti-bruteforce randomized pause.
    pause = random_login_delay()
    if pause:
        yield utility.deferred_sleep(pause)

    tor2web = get_tor2web_header(self.request.headers)

    user_id, status, role, pcn = yield login(req['username'], req['password'], tor2web)

    # Equalize total answer time against timing side channels.
    yield self.uniform_answers_delay()

    session = self.generate_session(user_id, role, status)

    self.write({
        'role': role,
        'session_id': session.id,
        'user_id': session.user_id,
        'session_expiration': int(GLSettings.sessions[session.id].getTime()),
        'status': session.user_status,
        'password_change_needed': pcn
    })
def post(self):
    """
    Login
    """
    req = self.validate_message(self.request.body, requests.ReceiptAuthDesc)

    pause = random_login_delay()
    if pause:
        yield utility.deferred_sleep(pause)

    user_id = yield login_whistleblower(req['receipt'],
                                        get_tor2web_header(self.request.headers))

    # Equalize total answer time against timing side channels.
    yield self.uniform_answers_delay()

    session = GLSession(user_id, 'whistleblower', 'Enabled')

    self.write({
        'session_id': session.id,
        'role': session.user_role,
        'user_id': session.user_id,
        'session_expiration': int(session.getTime())
    })
def post(self):
    """
    Login
    """
    req = self.validate_message(self.request.body, requests.AuthDesc)

    pause = random_login_delay()
    if pause:
        yield deferred_sleep(pause)

    tor2web = self.check_tor2web()

    # Equalize the answer time even when authentication raises.
    try:
        user_id, status, role, pcn = yield login(req['username'], req['password'], tor2web)
    finally:
        yield self.uniform_answers_delay()

    session = GLSession(user_id, role, status)

    self.write({
        'session_id': session.id,
        'role': session.user_role,
        'user_id': session.user_id,
        'session_expiration': int(session.getTime()),
        'status': session.user_status,
        'password_change_needed': pcn
    })
def post(self):
    """
    Receipt login handler used by whistleblowers
    """
    req = self.validate_message(self.request.body, requests.ReceiptAuthDesc)

    pause = random_login_delay()
    if pause:
        yield utility.deferred_sleep(pause)

    tor2web = get_tor2web_header(self.request.headers)

    user_id = yield login_whistleblower(req['receipt'], tor2web)

    # Equalize total answer time against timing side channels.
    yield self.uniform_answers_delay()

    session = GLSession(user_id, 'whistleblower', 'Enabled')

    self.write({
        'session_id': session.id,
        'role': session.user_role,
        'user_id': session.user_id,
        'session_expiration': int(session.getTime())
    })
def login_delay(tid):
    """
    The function in case of failed_login_attempts introduces
    an exponential increasing delay between 0 and 42 seconds

    the function implements the following table:
     ----------------------------------
    | failed_attempts |      delay     |
    | x < 5           | 0              |
    | 5               | random(5, 25)  |
    | 6               | random(6, 36)  |
    | 7               | random(7, 42)  |
    | 8 <= x <= 42    | random(x, 42)  |
    | x > 42          | 42             |
     ----------------------------------
    """
    failures = Settings.failed_login_attempts.get(tid, 0)

    # No delay below the threshold of five failures.
    if failures < 5:
        return

    lower = min(failures, 42)
    upper = min(failures * failures, 42)

    return deferred_sleep(SystemRandom().randint(lower, upper))
def post(self):
    """Username/password login; revokes any other session of the user."""
    req = self.validate_message(self.request.content.read(), requests.AuthDesc)

    pause = random_login_delay()
    if pause:
        yield deferred_sleep(pause)

    user_id, status, role, pcn = yield login(req['username'], req['password'],
                                             self.request.client_using_tor)

    # Revoke all other sessions for the newly authenticated user
    Sessions.revoke_all_sessions(user_id)

    session = new_session(user_id, role, status)

    returnValue({
        'session_id': session.id,
        'role': session.user_role,
        'user_id': session.user_id,
        'session_expiration': int(session.getTime()),
        'status': session.user_status,
        'password_change_needed': pcn
    })
def post(self):
    """
    Receipt login handler used by whistleblowers
    """
    req = self.validate_message(self.request.body, requests.ReceiptAuthDesc)

    pause = random_login_delay()
    if pause:
        yield deferred_sleep(pause)

    tor2web = self.check_tor2web()

    # Equalize the answer time whether or not the receipt is valid.
    try:
        user_id = yield login_whistleblower(req['receipt'], tor2web)
    finally:
        yield self.uniform_answers_delay()

    session = GLSession(user_id, 'whistleblower', 'Enabled')

    self.write({
        'session_id': session.id,
        'role': session.user_role,
        'user_id': session.user_id,
        'session_expiration': int(session.getTime())
    })
def spool_emails(self):
    """Send every queued mail, deleting each on success; pace 1s apart."""
    pool = yield get_mails_from_the_pool()

    for mail in pool:
        delivered = yield self.state.sendmail(mail['tid'], mail['address'],
                                              mail['subject'], mail['body'])
        if delivered:
            yield models.delete(models.Mail, models.Mail.id == mail['id'])

        # Pace deliveries so the SMTP relay is not flooded.
        yield deferred_sleep(1)
def spool_emails(self):
    """Send queued mail; the pacing delay grows on failures (capped at 10s)."""
    pause = 1

    pool = yield get_mails_from_the_pool()

    for mail in pool:
        delivered = yield self.state.sendmail(mail['tid'], mail['address'],
                                              mail['subject'], mail['body'])
        if delivered:
            yield tw(db_del, models.Mail, models.Mail.id == mail['id'])
        else:
            # Back off progressively when the relay keeps failing.
            pause = min(pause + 1, 10)

        yield deferred_sleep(pause)
def post(self):
    """
    This is the /login handler expecting login/password/role,
    """
    request = self.validate_message(self.request.body, requests.authDict)

    username = request['username']
    password = request['password']
    role = request['role']

    # Anti-bruteforce: optional randomized pause before processing.
    delay = random_login_delay()
    if delay:
        yield utility.deferred_sleep(delay)

    if role not in ['admin', 'wb', 'receiver']:
        raise errors.InvalidInputFormat(
            "Denied login request: invalid role requested")

    # Tor2web access is allowed only for roles that accept it.
    if get_tor2web_header(self.request.headers):
        if not accept_tor2web(role):
            log.err("Denied login request on Tor2web for role '%s'" % role)
            raise errors.TorNetworkRequired
        else:
            log.debug("Accepted login request on Tor2web for role '%s'" % role)

    # Dispatch to the role-specific credential check; each returns a
    # falsy user_id on failure.
    if role == 'admin':
        user_id, status, pcn = yield login_admin(username, password)
    elif role == 'wb':
        user_id = yield login_wb(password)
        status = 'enabled'
        pcn = False
    else:  # role == 'receiver'
        user_id, status, pcn = yield login_receiver(username, password)

    if user_id is False:
        # Feed the global anti-bruteforce counter before failing.
        GLSetting.failed_login_attempts += 1
        raise errors.InvalidAuthRequest

    session = self.generate_session(user_id, role, status)

    auth_answer = {
        'role': role,
        'session_id': session.id,
        'user_id': session.user_id,
        'session_expiration': int(GLSetting.sessions[session.id].getTime()),
        'status': session.user_status,
        'password_change_needed': pcn
    }

    # Equalize total answer time against timing side channels.
    yield self.uniform_answers_delay()

    self.write(auth_answer)
def write_file(self, filepath):
    """
    Stream the file at `filepath` to the client in fixed-size chunks,
    pausing briefly between chunks to keep the reactor responsive.
    """
    with open(filepath, "rb") as f:
        data = f.read(GLSettings.file_chunk_size)
        if len(data) != 0:
            # The first chunk goes out immediately, without a pause.
            self.write(data)
            data = f.read(GLSettings.file_chunk_size)
            while len(data) != 0:
                yield deferred_sleep(0.001)
                self.write(data)
                data = f.read(GLSettings.file_chunk_size)
def post(self):
    """
    This is the /login handler expecting login/password/role,
    """
    request = self.validate_message(self.request.body, requests.AuthDesc)

    username = request['username']
    password = request['password']
    role = request['role']

    # Anti-bruteforce: optional randomized pause before processing.
    delay = random_login_delay()
    if delay:
        yield utility.deferred_sleep(delay)

    if role not in ['admin', 'wb', 'receiver']:
        raise errors.InvalidAuthentication

    # Tor2web access is allowed only for roles that accept it.
    if get_tor2web_header(self.request.headers):
        if not accept_tor2web(role):
            log.err("Denied login request on Tor2web for role '%s'" % role)
            raise errors.TorNetworkRequired
        else:
            log.debug("Accepted login request on Tor2web for role '%s'" % role)

    # Dispatch to the role-specific credential check; each returns a
    # falsy user_id on failure.
    if role == 'admin':
        user_id, status, pcn = yield login_admin(username, password)
    elif role == 'wb':
        user_id = yield login_wb(password)
        status = 'enabled'
        pcn = False
    else:  # role == 'receiver'
        user_id, status, pcn = yield login_receiver(username, password)

    if user_id is False:
        # Feed the global anti-bruteforce counter before failing.
        GLSetting.failed_login_attempts += 1
        raise errors.InvalidAuthentication

    session = self.generate_session(user_id, role, status)

    auth_answer = {
        'role': role,
        'session_id': session.id,
        'user_id': session.user_id,
        'session_expiration': int(GLSetting.sessions[session.id].getTime()),
        'status': session.user_status,
        'password_change_needed': pcn
    }

    # Equalize total answer time against timing side channels.
    yield self.uniform_answers_delay()

    self.write(auth_answer)
def uniform_answers_delay(self):
    """
    Sleep until the total request time reaches
    GLSettings.side_channels_guard, in order to counteract some side
    channel attacks.
    """
    elapsed = self.request.request_time()

    remaining = GLSettings.side_channels_guard - elapsed
    if remaining > 0:
        yield deferred_sleep(remaining)
def post(self):
    """Whistleblower receipt login for the current tenant."""
    req = self.validate_message(self.request.content.read(), requests.ReceiptAuthDesc)

    connection_check(self.request.client_ip, self.request.tid,
                     'whistleblower', self.request.client_using_tor)

    # Anti-bruteforce randomized pause.
    pause = random_login_delay()
    if pause:
        yield deferred_sleep(pause)

    session = yield login_whistleblower(self.request.tid, req['receipt'])

    log.debug("Login: Success (%s)" % session.user_role)

    returnValue(session.serialize())
def execution_check(self):
    """
    Record the handler execution time, alert by log and mail when the
    configured threshold is exceeded, and optionally pad the answer up
    to Settings.side_channels_guard (milliseconds).
    """
    elapsed = datetime.now() - self.request.start_time
    self.request.execution_time = elapsed

    if elapsed.seconds > self.handler_exec_time_threshold:
        err_tup = ("Handler [%s] exceeded execution threshold (of %d secs) with an execution time of %.2f seconds",
                   self.name,
                   self.handler_exec_time_threshold,
                   elapsed.seconds)
        log.err(tid=self.request.tid, *err_tup)
        self.state.schedule_exception_email(self.request.tid, *err_tup)

    track_handler(self)

    if self.uniform_answer_time:
        elapsed_ms = float(self.request.execution_time.microseconds) / float(1000)
        needed_delay = (float(Settings.side_channels_guard) - elapsed_ms) / float(1000)
        if needed_delay > 0:
            return deferred_sleep(needed_delay)
def execution_check(self):
    """
    Record the handler execution time, alert by log and exception mail
    when the configured threshold is exceeded, and optionally pad the
    answer up to Settings.side_channels_guard (milliseconds).
    """
    self.request.execution_time = datetime.now() - self.request.start_time

    if self.request.execution_time.seconds > self.handler_exec_time_threshold:
        err_tup = ("Handler [%s] exceeded execution threshold (of %d secs) with an execution time of %.2f seconds",
                   self.name,
                   self.handler_exec_time_threshold,
                   self.request.execution_time.seconds)
        log.err(tid=self.request.tid, *err_tup)
        self.state.schedule_exception_email(*err_tup)

    track_handler(self)

    if self.uniform_answer_time:
        # NOTE(review): `.microseconds` covers only the sub-second part of the
        # timedelta, and under Python 2 these `/ 1000` divisions would be
        # integer divisions — confirm this file targets Python 3.
        needed_delay = (Settings.side_channels_guard - (self.request.execution_time.microseconds / 1000)) / 1000
        if needed_delay > 0:
            return deferred_sleep(needed_delay)
def operation(self):
    """Deliver queued receiver notification mails, then aggregated pings."""
    if GLSettings.memory_copy.disable_receiver_notification_emails:
        log.debug("MailFlush: Receiver(s) Notification disabled by Admin")
        return

    queue_events = yield load_complete_events()
    if not len(queue_events):
        returnValue(None)

    # Drop events that must not generate mail — e.g. files uploaded during
    # the first submission, like issue #444 (zombie edition) — and mark
    # them as already sent.
    pending, suppressed = filter_notification_event(queue_events)

    for eid in suppressed:
        yield mark_event_as_sent(eid)

    plugin = getattr(notification, GLSettings.notification_plugins[0])()

    # This wrap calls plugin/notification.MailNotification
    notifier = NotificationMail(plugin)

    for event in pending:
        yield notifier.do_every_notification(event)

        if not self.skip_sleep:
            yield deferred_sleep(2)

    # Aggregate per-receiver ping notifications, if configured.
    ping_summary = {}
    for event in pending:
        if not event.receiver_info['ping_notification']:
            continue

        rid = event.receiver_info['id']
        if rid in ping_summary:
            ping_summary[rid][1] += 1
        else:
            ping_summary[rid] = [event.receiver_info, 1]

    if ping_summary:
        # Every event carries the same notification settings, so element [0]
        # is representative; ping_mail_flush needs it for Templating().
        yield self.ping_mail_flush(pending[0].notification_settings, ping_summary)
def execution_check(self):
    """
    Record the handler execution time, alert by log and exception mail
    when the configured threshold is exceeded, and optionally pad the
    answer up to GLSettings.side_channels_guard (milliseconds).
    """
    self.request.execution_time = datetime.now() - self.request.start_time

    elapsed = self.request.execution_time
    if elapsed.seconds > self.handler_exec_time_threshold:
        error = "Handler [%s] exceeded execution threshold (of %d secs) with an execution time of %.2f seconds" % \
                (self.name, self.handler_exec_time_threshold, elapsed.seconds)
        log.err(error)
        send_exception_email(error)

    track_handler(self)

    if self.uniform_answer_time:
        remaining = (GLSettings.side_channels_guard - (elapsed.microseconds / 1000)) / 1000
        if remaining > 0:
            yield deferred_sleep(remaining)
def operation(self):
    """Deliver queued receiver notification mails, then aggregated pings."""
    if GLSettings.memory_copy.disable_receiver_notification_emails:
        log.debug("MailFlush: Receiver(s) Notification disabled by Admin")
        return

    queue_events = yield load_complete_events()
    if not len(queue_events):
        returnValue(None)

    # Exclude events that must not be sent — e.g. files uploaded during the
    # first submission, like issue #444 (zombie edition) — and flag them.
    to_send, to_suppress = filter_notification_event(queue_events)

    for eid in to_suppress:
        yield mark_event_as_sent(eid)

    plugin = getattr(notification, GLSettings.notification_plugins[0])()

    # This wrap calls plugin/notification.MailNotification
    mail_cb = NotificationMail(plugin)

    for event in to_send:
        yield mail_cb.do_every_notification(event)

        if not self.skip_sleep:
            yield deferred_sleep(2)

    # Build the per-receiver ping digest, if configured.
    per_receiver = {}
    for event in to_send:
        if not event.receiver_info['ping_notification']:
            continue

        rid = event.receiver_info['id']
        if rid not in per_receiver:
            per_receiver[rid] = [event.receiver_info, 1]
        else:
            per_receiver[rid][1] += 1

    if per_receiver:
        # All events share the same notification settings; element [0] is
        # representative and needed by ping_mail_flush for Templating().
        yield self.ping_mail_flush(to_send[0].notification_settings, per_receiver)
def post(self):
    """Whistleblower receipt login."""
    req = self.validate_message(self.request.content.read(), requests.ReceiptAuthDesc)

    # Anti-bruteforce randomized pause.
    pause = random_login_delay()
    if pause:
        yield deferred_sleep(pause)

    user_id = yield login_whistleblower(self.request.tid, req['receipt'],
                                        self.request.client_using_tor)

    session = new_session(self.request.tid, user_id, 'whistleblower', 'Enabled')

    returnValue({
        'session_id': session.id,
        'role': session.user_role,
        'user_id': session.user_id,
        'session_expiration': int(session.getTime())
    })
def operation(self):
    """Deliver queued receiver notifications, then the aggregated pings."""
    if not GLSetting.memory_copy.receiver_notif_enable:
        log.debug("MailFlush: Receiver notification disabled")
        return

    events = yield load_complete_events()
    if not len(events):
        return

    plugin = getattr(notification, GLSetting.notification_plugins[0])()
    notifier = NotificationMail(plugin)

    for event in events:
        yield notifier.do_every_notification(event)

        if not self.skip_sleep:
            yield deferred_sleep(3)
            # note, this settings has to be multiply for 3 (seconds in this iteration)
            # and the results (notification_limit * 3) need to be shorter than the periodic running time
            # specified in runner.py, that's why is set at FIVE minutes.
            # the number of 'qe' is capped at GLSetting.notification_limit

    # TODO: implement ping as an appropriate plugin
    ping_summary = {}
    for event in events:
        if not event.receiver_info['ping_notification']:
            continue

        rid = event.receiver_info['id']
        if rid in ping_summary:
            ping_summary[rid][1] += 1
        else:
            ping_summary[rid] = [event.receiver_info, 1]

    if ping_summary:
        # All events share the same notification settings; element [0] is
        # representative and is passed to ping_mail_flush for Templating()
        yield self.ping_mail_flush(events[0].notification_settings, ping_summary)
def post(self):
    """
    Username/password login supporting a two-step (second factor)
    receiver authentication flow; the answer carries the login step so
    the client can prompt for the second factor.
    """
    request = self.validate_message(self.request.content.read(), requests.AuthDesc)

    # Anti-bruteforce: optional randomized pause before processing.
    delay = random_login_delay()
    if delay:
        yield deferred_sleep(delay)

    user_id, status, role, pcn, login_step = yield login(
        self.request.tid, request['username'], request['password'],
        request['receiver_second_login'], request['receiver_auth_code'],
        self.request.client_using_tor, self.request.client_ip,
        request['token'])

    if role == 'receiver' and login_step == 'second_login_to_complete':
        # Intermediate session: the receiver still has to provide the
        # second factor; note the minimal session_expiration of 1.
        session = new_session_rec_auth(self.request.tid, user_id, role,
                                       status, login_step)
        returnValue({
            'session_id': session.id,
            'role': session.user_role,
            'user_id': session.user_id,
            'session_expiration': 1,
            'status': session.user_status,
            'password_change_needed': pcn,
            'receiverLoginState': login_step
        })
    else:
        # Fully authenticated session.
        session = new_session(self.request.tid, user_id, role, status,
                              login_step)
        returnValue({
            'session_id': session.id,
            'role': session.user_role,
            'user_id': session.user_id,
            'session_expiration': int(session.getTime()),
            'status': session.user_status,
            'password_change_needed': pcn,
            'receiverLoginState': login_step
        })
def operation(self):
    """Deliver queued receiver notification mails, then aggregated pings."""
    if not GLSetting.memory_copy.receiver_notif_enable:
        log.debug("MailFlush: Receiver(s) Notification disabled by Admin")
        return

    events = yield load_complete_events()
    if not len(events):
        returnValue(None)

    plugin = getattr(notification, GLSetting.notification_plugins[0])()

    # This wrap calls plugin/notification.MailNotification
    notifier = NotificationMail(plugin)

    for event in events:
        yield notifier.do_every_notification(event)

        if not self.skip_sleep:
            yield deferred_sleep(2)

    # Aggregate per-receiver ping notifications, if configured.
    ping_summary = {}
    for event in events:
        if not event.receiver_info['ping_notification']:
            continue

        rid = event.receiver_info['id']
        if rid in ping_summary:
            ping_summary[rid][1] += 1
        else:
            ping_summary[rid] = [event.receiver_info, 1]

    if ping_summary:
        # All events share the same notification settings; element [0] is
        # representative and is passed to ping_mail_flush for Templating()
        yield self.ping_mail_flush(events[0].notification_settings, ping_summary)
def operation(self):
    """Send pending notifications, flag suppressed events, then send pings."""
    events = yield load_complete_events()
    if not len(events):
        returnValue(None)

    # Drop events that must not generate a mail — e.g. the files uploaded
    # during the first submission, like issue #444 (zombie edition) — and
    # record them as handled.
    to_send, suppressed = filter_notification_event(events)

    for eid in suppressed:
        yield update_event_notification_status(eid, True)

    for event in to_send:
        yield self.mail_notification.do_every_notification(event)

        if not self.skip_sleep:
            yield deferred_sleep(2)

    # Aggregate per-receiver ping notifications, if configured.
    ping_summary = {}
    for event in to_send:
        if not event.receiver_info['ping_notification']:
            continue

        rid = event.receiver_info['id']
        if rid in ping_summary:
            ping_summary[rid][1] += 1
        else:
            ping_summary[rid] = [event.receiver_info, 1]

    if ping_summary:
        # All events share the same notification settings; element [0] is
        # representative and is needed by ping_mail_flush for Templating().
        yield self.ping_mail_flush(to_send[0].notification_settings, ping_summary)
def post(self):
    """
    Username/password login, possibly targeting a different tenant;
    cross-tenant logins answer with a redirect carrying a one-time
    multitenant auth token.
    """
    request = self.validate_message(self.request.content.read(), requests.AuthDesc)

    # Anti-bruteforce: optional randomized pause before processing.
    delay = random_login_delay()
    if delay:
        yield deferred_sleep(delay)

    tid = int(request['tid'])
    if tid == 0:
        tid = self.request.tid

    user_id, status, role, pcn = yield login(tid,
                                             request['username'],
                                             request['password'],
                                             self.request.client_using_tor,
                                             self.request.client_ip,
                                             request['token'])

    if tid == self.request.tid:
        session = new_session(self.request.tid, user_id, role, status)
        returnValue({
            'session_id': session.id,
            'role': session.user_role,
            'user_id': session.user_id,
            'session_expiration': int(session.getTime()),
            'status': session.user_status,
            'password_change_needed': pcn
        })
    else:
        token = yield get_multitenant_auth_token(user_id, tid)
        # NOTE(review): when no token is obtained the handler returns without
        # calling returnValue, i.e. the answer is None — confirm the caller
        # handles this case intentionally.
        if token:
            returnValue({
                'redirect': 'https://%s/#/login?token=%s' % (State.tenant_cache[tid].hostname, token)
            })
def operation(self):
    """Run the certificate-expiration check for every tenant, paced 60s apart."""
    tenants = self.state.tenant_state

    for tenant_id in tenants:
        yield self.cert_expiration_checks(tenant_id)
        # Space the checks out to avoid bursts of work.
        yield deferred_sleep(60)
def post(self):
    """
    This is the /login handler expecting login/password/role,
    """
    request = self.validate_message(self.request.body, requests.authDict)

    username = request['username']
    password = request['password']
    role = request['role']

    # Anti-bruteforce: optional randomized pause before processing.
    delay = random_login_delay()
    if delay:
        yield utility.deferred_sleep(delay)

    if role not in ['admin', 'wb', 'receiver']:
        raise errors.InvalidInputFormat("Authentication role %s" % str(role))

    # Tor2web access is allowed only for roles that accept it.
    if get_tor2web_header(self.request.headers):
        if not accept_tor2web(role):
            log.err("Denied login request on Tor2web for role '%s'" % role)
            raise errors.TorNetworkRequired
        else:
            log.debug("Accepted login request on Tor2web for role '%s'" % role)

    # Then verify credential, if the channel shall be trusted
    #
    # Here is created the session struct, is now explicit in the
    # three roles, because 'user_id' has a different meaning for every role
    if role == 'admin':
        authorized_username = yield login_admin(username, password)
        if authorized_username is False:
            GLSetting.failed_login_attempts += 1
            raise errors.InvalidAuthRequest
        new_session_id = self.generate_session(role, authorized_username)
        auth_answer = {
            'role': 'admin',
            'session_id': new_session_id,
            'user_id': unicode(authorized_username),
            'session_expiration': int(GLSetting.sessions[new_session_id].expirydate),
        }
    elif role == 'wb':
        wbtip_id = yield login_wb(password)
        if wbtip_id is False:
            GLSetting.failed_login_attempts += 1
            raise errors.InvalidAuthRequest
        new_session_id = self.generate_session(role, wbtip_id)
        auth_answer = {
            # FIX: the whistleblower answer wrongly reported role 'admin'
            # (copy-paste from the branch above).
            'role': 'wb',
            'session_id': new_session_id,
            'user_id': unicode(wbtip_id),
            'session_expiration': int(GLSetting.sessions[new_session_id].expirydate),
        }
    elif role == 'receiver':
        receiver_id = yield login_receiver(username, password)
        if receiver_id is False:
            GLSetting.failed_login_attempts += 1
            raise errors.InvalidAuthRequest
        new_session_id = self.generate_session(role, receiver_id)
        auth_answer = {
            'role': 'receiver',
            'session_id': new_session_id,
            'user_id': unicode(receiver_id),
            'session_expiration': int(GLSetting.sessions[new_session_id].expirydate),
        }
    else:
        # Unreachable: role was validated above; kept as defense in depth.
        log.err("Invalid role proposed to authenticate!")
        raise errors.InvalidAuthRequest

    # Equalize total answer time against timing side channels.
    yield self.uniform_answers_delay()

    self.write(auth_answer)