def post(self):
    """
    Authenticate a user via username/password credentials.

    Twisted inlineCallbacks-style generator: responds with a serialized
    session, or with a redirect URL when the login targets a different
    tenant than the one serving the request.
    """
    request = self.validate_message(self.request.content.read(), requests.AuthDesc)

    # Random delay to mitigate timing-based enumeration / brute forcing
    delay = random_login_delay()
    if delay:
        yield deferred_sleep(delay)

    tid = int(request['tid'])
    if tid == 0:
        # tid 0 means "the tenant serving this request"
        tid = self.request.tid

    session = yield login(tid,
                          request['username'],
                          request['password'],
                          request['authcode'],
                          self.request.client_using_tor,
                          self.request.client_ip)

    log.debug("Login: Success (%s)" % session.user_role)

    if tid != self.request.tid:
        # Cross-tenant login: hand the client a redirect to the target
        # tenant's hostname carrying the freshly created session token.
        returnValue({
            'redirect': 'https://%s/#/login?token=%s' % (State.tenant_cache[tid].hostname, session.id)
        })

    returnValue(session.serialize())
def post(self):
    """
    Authenticate a user via an existing session token (single sign-on
    between tenants). Twisted inlineCallbacks-style generator.
    """
    request = self.validate_message(self.request.content.read(), requests.TokenAuthDesc)

    tid = int(request['tid'])
    if tid == 0:
        # tid 0 means "the tenant serving this request"
        tid = self.request.tid

    # Random delay to mitigate timing-based enumeration / brute forcing
    delay = random_login_delay()
    if delay:
        yield deferred_sleep(delay)

    session = Sessions.get(request['token'])
    if session is None or session.tid != tid:
        # Unknown token or token bound to a different tenant
        Settings.failed_login_attempts += 1
        raise errors.InvalidAuthentication

    connection_check(self.request.client_ip, tid, session.user_role, self.request.client_using_tor)

    # Rotate the session id on re-authentication
    session = Sessions.regenerate(session.id)

    log.debug("Login: Success (%s)" % session.user_role)

    if tid != self.request.tid:
        # Cross-tenant login: redirect to the target tenant's hostname
        returnValue({
            'redirect': 'https://%s/#/login?token=%s' % (State.tenant_cache[tid].hostname, session.id)
        })

    returnValue(session.serialize())
def failure_cb(failure):
    """
    Errback invoked when the SMTP connection/delivery fails.

    @param failure {Failure {twisted.internet.FirstError {Failure}}}

    Returns False so the caller's Deferred resolves to an explicit
    "not sent" value instead of propagating the error.
    """
    # NOTE(review): `tid` is taken from the enclosing scope (this function
    # appears to be a closure defined inside a sendmail-like function).
    log.err("SMTP connection failed (Exception: %s)", failure.value.subFailure.value, tid=tid)
    log.debug(failure)
    return False
def login_whistleblower(session, tid, receipt):
    """
    login_whistleblower returns a session

    :param session: an ORM session
    :param tid: the tenant id
    :param receipt: the whistleblower receipt (plaintext)
    :raises errors.InvalidAuthentication: when no tip matches the receipt
    """
    x = None

    # Collect the distinct receipt hash algorithms in use on this tenant,
    # then hash the supplied receipt with each one so that tips created
    # before a hash-algorithm upgrade can still be matched.
    algorithms = [x[0] for x in session.query(WhistleblowerTip.hash_alg).filter(WhistleblowerTip.tid == tid).distinct()]
    if algorithms:
        hashes = []
        for alg in algorithms:
            hashes.append(GCE.hash_password(receipt, State.tenant_cache[tid].receipt_salt, alg))

        x = session.query(WhistleblowerTip, InternalTip) \
                   .filter(WhistleblowerTip.receipt_hash.in_(hashes),
                           WhistleblowerTip.tid == tid,
                           InternalTip.id == WhistleblowerTip.id,
                           InternalTip.tid == WhistleblowerTip.tid).one_or_none()

    if x is None:
        log.debug("Whistleblower login: Invalid receipt")
        Settings.failed_login_attempts += 1
        raise errors.InvalidAuthentication

    wbtip = x[0]
    itip = x[1]

    itip.wb_last_access = datetime_now()

    crypto_prv_key = ''
    if State.tenant_cache[1].encryption and wbtip.crypto_prv_key:
        # Derive the symmetric key from the receipt and unlock the
        # whistleblower's private key for this session.
        user_key = GCE.derive_key(receipt.encode('utf-8'), State.tenant_cache[tid].receipt_salt)
        crypto_prv_key = GCE.symmetric_decrypt(user_key, wbtip.crypto_prv_key)

    return Sessions.new(tid, wbtip.id, 'whistleblower', False, crypto_prv_key)
def post(self):
    """
    Receive a client-side exception report and hand it to the
    exception mail handler.
    """
    request = self.validate_message(self.request.content.read(), requests.ExceptionDesc)

    # Assemble the report body in one pass instead of incremental +=
    exception_email = "".join([
        "URL: %s\n\n" % request['errorUrl'],
        "User Agent: %s\n\n" % request['agent'],
        "Error Message: %s\n\n" % request['errorMessage'],
        "Stacktrace:\n",
        json.dumps(request['stackTrace'], indent=2),
    ])

    self.state.schedule_exception_email(exception_email)

    log.debug("Received client exception and passed error to exception mail handler")
def operation(self):
    """
    Check the package repository for the newest available GlobaLeaks
    release and trigger the update notification evaluation.
    """
    log.debug('Fetching latest GlobaLeaks version from repository')

    raw_index = yield self.fetch_packages_file()
    raw_index = text_type(raw_index, 'utf-8')

    # Pull the Version field of every 'globaleaks' paragraph out of the
    # Debian Packages index and keep the highest by version ordering.
    candidates = sorted((p['Version']
                         for p in deb822.Deb822.iter_paragraphs(raw_index)
                         if p['Package'] == 'globaleaks'),
                        key=V)

    latest_version = candidates[-1]

    yield evaluate_update_notification(self.state, latest_version)

    log.debug('The newest version in the repository is: %s', latest_version)
def run_acme_reg_to_finish(domain, accnt_key, priv_key, hostname, tmp_chall_dict, directory_url):
    """
    Runs the entire process of ACME registration.

    :param domain: the domain being certified (unused beyond hostname)
    :param accnt_key: the ACME account key
    :param priv_key: the PEM private key the CSR is generated for
    :param hostname: the hostname to certify
    :param tmp_chall_dict: dict-like store exposing challenge tokens to the HTTP server
    :param directory_url: the ACME directory URL
    :return: the split certificate chain for the finalized order
    :raises Exception: when no HTTP-01 challenge is offered by the CA
    """
    client = create_v2_client(directory_url, accnt_key)

    # First we need to create a registration with the email address provided
    # and accept the terms of service
    log.info("Using boulder server %s", directory_url)

    client.net.account = client.new_account(
        messages.NewRegistration.from_data(
            terms_of_service_agreed=True
        )
    )

    # Now we need to open an order and request our certificate
    #
    # NOTE: We'll let ACME generate a CSR for our private key as there's
    # a lot of utility code it uses to generate the CSR in a specific
    # fashion. Better to use what LE provides than to roll our own as we
    # we doing with the v1 code
    #
    # This will also let us support multi-domain certificate requests in the
    # future, as well as mandate OCSP-Must-Staple if/when GL's HTTPS server
    # supports it
    csr = crypto_util.make_csr(priv_key, [hostname], False)
    order = client.new_order(csr)
    authzr = order.authorizations

    log.info('Created a new order for %s', hostname)

    # authzr is a list of Authorization resources; we need to find the
    # HTTP-01 challenge and use it.
    #
    # Bug fix: challb must be initialized before the search, otherwise a
    # missing HTTP-01 challenge raised NameError instead of the intended
    # exception below; the outer break also prevents chall_body from being
    # reassigned by later authorizations after the challenge was found.
    challb = None
    for auth_req in authzr:  # pylint: disable=not-an-iterable
        for chall_body in auth_req.body.challenges:
            if isinstance(chall_body.chall, challenges.HTTP01):
                challb = chall_body
                break
        if challb is not None:
            break

    if challb is None:
        raise Exception("HTTP01 challenge unavailable!")

    _, chall_tok = challb.response_and_validation(client.net.key)
    v = challb.chall.encode("token")
    log.info('Exposing challenge on %s', v)
    tmp_chall_dict.set(v, ChallTok(chall_tok))

    cr = client.answer_challenge(challb, challb.response(client.net.key))
    log.debug('Acme CA responded to challenge request with: %s', cr)

    order = client.poll_and_finalize(order)

    return split_certificate_chain(order.fullchain_pem)
def operation(self):
    """
    Persist the statistics collected since the last window start, then
    reset the hourly counters. Twisted inlineCallbacks-style generator.
    """
    current_time = datetime_now()
    statistic_summary = get_statistics(self.state)

    # Skip the database write entirely when nothing was collected
    if statistic_summary:
        yield save_statistics(self.state.stats_collection_start_time, current_time, statistic_summary)
        log.debug("Stored statistics %s collected from %s to %s",
                  statistic_summary,
                  self.state.stats_collection_start_time,
                  current_time)

    # Hourly Resets
    self.state.reset_hourly()
def post(self):
    """
    Authenticate a whistleblower via receipt.
    Twisted inlineCallbacks-style generator returning a serialized session.
    """
    request = self.validate_message(self.request.content.read(), requests.ReceiptAuthDesc)

    connection_check(self.request.client_ip, self.request.tid, 'whistleblower', self.request.client_using_tor)

    # Random delay to mitigate timing-based enumeration / brute forcing
    delay = random_login_delay()
    if delay:
        yield deferred_sleep(delay)

    session = yield login_whistleblower(self.request.tid, request['receipt'])

    log.debug("Login: Success (%s)" % session.user_role)

    returnValue(session.serialize())
def sync_clean_untracked_files(session):
    """
    Removes files in Settings.attachments_path that are not tracked by
    InternalFile/ReceiverFile.

    :param session: an ORM session used to enumerate the tracked files
    """
    tracked_files = db_get_tracked_files(session)
    for filesystem_file in os.listdir(Settings.attachments_path):
        if filesystem_file not in tracked_files:
            file_to_remove = os.path.join(Settings.attachments_path, filesystem_file)
            try:
                log.debug('Removing untracked file: %s', file_to_remove)
                fs.overwrite_and_remove(file_to_remove)
            except OSError:
                # Bug fix: the original call passed file_to_remove as a
                # format argument without a %s placeholder in the message.
                log.err('Failed to remove untracked file %s', file_to_remove)
def cleaning_dead_files(self): """ This function is called at the start of GlobaLeaks, in bin/globaleaks, and checks if the file is present in temporally_encrypted_dir """ # temporary .aes files must be simply deleted for f in os.listdir(self.settings.tmp_path): path = os.path.join(self.settings.tmp_path, f) log.debug("Removing old temporary file: %s", path) try: os.remove(path) except OSError as excep: log.debug("Error while evaluating removal for %s: %s", path, excep.strerror) # temporary .aes files with lost keys can be deleted # while temporary .aes files with valid current key # will be automagically handled by delivery sched. keypath = os.path.join(self.settings.tmp_path, self.settings.AES_keyfile_prefix) for f in os.listdir(self.settings.attachments_path): path = os.path.join(self.settings.attachments_path, f) try: result = self.settings.AES_file_regexp_comp.match(f) if result is not None: if not os.path.isfile("%s%s" % (keypath, result.group(1))): log.debug("Removing old encrypted file (lost key): %s", path) os.remove(path) except Exception as excep: log.debug("Error while evaluating removal for %s: %s", path, excep)
def process_mail_creation(self, session, tid, data): user_id = data['user']['id'] # Do not spool emails if the receiver has opted out of ntfns for this tip. if not data['tip']['enable_notifications']: log.debug("Discarding emails for %s due to receiver's preference.", user_id) return # https://github.com/globaleaks/GlobaLeaks/issues/798 # TODO: the current solution is global and configurable only by the admin sent_emails = self.state.get_mail_counter(user_id) if sent_emails >= self.state.tenant_cache[tid].notification.notification_threshold_per_hour: log.debug("Discarding emails for receiver %s due to threshold already exceeded for the current hour", user_id) return self.state.increment_mail_counter(user_id) if sent_emails >= self.state.tenant_cache[tid].notification.notification_threshold_per_hour: log.info("Reached threshold of %d emails with limit of %d for receiver %s", sent_emails, self.state.tenant_cache[tid].notification.notification_threshold_per_hour, user_id, tid=tid) # simply changing the type of the notification causes # to send the notification_limit_reached data['type'] = u'receiver_notification_limit_reached' data['node'] = self.serialize_config(session, 'node', tid, data['user']['language']) if data['node']['mode'] != u'whistleblowing.it': data['notification'] = self.serialize_config(session, 'notification', tid, data['user']['language']) else: data['notification'] = self.serialize_config(session, 'notification', 1, data['user']['language']) subject, body = Templating().get_mail_subject_and_body(data) # If the receiver has encryption enabled encrypt the mail body if data['user']['pgp_key_public']: pgpctx = PGPContext(self.state.settings.tmp_path) fingerprint = pgpctx.load_key(data['user']['pgp_key_public'])['fingerprint'] body = pgpctx.encrypt_message(fingerprint, body) session.add(models.Mail({ 'address': data['user']['mail_address'], 'subject': subject, 'body': body, 'tid': tid, }))
def perform_action(session, tid, csr_fields):
    """
    Generate a PEM CSR for the tenant's stored TLS private key.

    :raises errors.InputValidationError: when the stored key fails
        validation or CSR generation fails.
    """
    tls_config = load_tls_dict(session, tid)

    # Refuse to proceed on an invalid/absent private key
    validator = tls.PrivKeyValidator()
    key_ok, _ = validator.validate(tls_config)
    if not key_ok:
        raise errors.InputValidationError()

    try:
        pem_csr = tls.gen_x509_csr_pem(tls_config['ssl_key'], csr_fields, Settings.csr_sign_bits)
    except Exception as e:
        log.err(e)
        raise errors.InputValidationError('CSR gen failed')

    log.debug("Generated a new CSR")
    return pem_csr
def create_directory(self, path):
    """
    Create the specified directory.

    Returns True on success, False if the directory already existed;
    re-raises the OSError when creation fails for any other reason.
    """
    if os.path.exists(path):
        # Already present: nothing created
        return False

    log.debug("Creating directory: %s", path)

    try:
        os.mkdir(path)
    except OSError as err:
        log.debug("Error in creating directory: %s (%s)", path, err.strerror)
        raise err

    return True
def add_hidden_service(self, tid, hostname, key):
    """
    Create (or re-publish) a Tor ephemeral onion service for a tenant.

    When hostname and key are empty a brand new service is created and,
    once Tor reports it up, its address/key are persisted; otherwise the
    existing service identified by hostname/key is re-registered.
    Returns the Deferred tracking the service startup, or None when no
    Tor control connection is available.
    """
    if self.tor_conn is None:
        return

    # All onion services proxy port 80 to the local HTTP listener
    hs_loc = ('80 localhost:8083')

    if not hostname and not key:
        if tid in self.startup_semaphore:
            # A creation for this tenant is already in flight: reuse it
            log.debug('Still waiting for hidden service to start', tid=tid)
            return self.startup_semaphore[tid]

        log.info('Creating new onion service', tid=tid)
        ephs = EphemeralHiddenService(hs_loc)
    else:
        log.info('Setting up existing onion service %s', hostname, tid=tid)
        ephs = EphemeralHiddenService(hs_loc, key)
        self.hs_map[hostname] = ephs

    @defer.inlineCallbacks
    def init_callback(ret):
        log.info('Initialization of hidden-service %s completed.', ephs.hostname, tid=tid)
        if not hostname and not key:
            if tid in State.tenant_cache:
                # Tenant still exists: record the newly generated address/key
                self.hs_map[ephs.hostname] = ephs
                yield set_onion_service_info(tid, ephs.hostname, ephs.private_key)
            else:
                # Tenant vanished while the service was starting: tear down
                yield ephs.remove_from_tor(self.tor_conn.protocol)

            # Refresh the root tenant too, since tenant 1 caches
            # cross-tenant data
            tid_list = list(set([1, tid]))

            for x in tid_list:
                Cache().invalidate(x)

            yield refresh_memory_variables(tid_list)

            del self.startup_semaphore[tid]

    def init_errback(failure):
        if tid in self.startup_semaphore:
            del self.startup_semaphore[tid]

        raise failure.value

    self.startup_semaphore[tid] = ephs.add_to_tor(self.tor_conn.protocol)

    return self.startup_semaphore[tid].addCallbacks(init_callback, init_errback)  # pylint: disable=no-member
def download_rfile(self, session, tid, user_id, file_id):
    """
    Account a receiver's download of a file and return its serialized
    descriptor together with the tip's encrypted private key.
    """
    rfile, rtip = session.query(models.ReceiverFile, models.ReceiverTip) \
                         .filter(models.ReceiverFile.id == file_id,
                                 models.ReceiverFile.receivertip_id == models.ReceiverTip.id,
                                 models.ReceiverTip.receiver_id == user_id,
                                 models.ReceiverTip.internaltip_id == models.InternalTip.id,
                                 models.InternalTip.tid == tid).one()

    # NOTE(review): .one() raises when no row matches, so this branch looks
    # unreachable; kept as-is.
    if not rfile:
        raise errors.ModelNotFound(models.ReceiverFile)

    log.debug("Download of file %s by receiver %s (%d)" %
              (rfile.internalfile_id, rtip.receiver_id, rfile.downloads))

    rfile.last_access = datetime_now()
    rfile.downloads += 1

    return serializers.serialize_rfile(session, tid, rfile), rtip.crypto_tip_prv_key
def write_upload_plaintext_to_disk(self, destination):
    """
    Copy the uploaded file body to destination in buffered chunks.

    @param uploaded_file: uploaded_file data struct
    @param destination: the file destination path
    @return: a descriptor dictionary for the saved file

    The destination path is recorded on the upload descriptor even when
    the copy fails, so the caller can clean up the partial file.
    """
    try:
        log.debug('Creating file %s with %d bytes', destination, self.uploaded_file['size'])

        with self.uploaded_file['body'].open('r') as encrypted_file, open(destination, 'wb') as plaintext_file:
            while True:
                # Read in bufferSize chunks to bound memory usage
                chunk = encrypted_file.read(abstract.FileDescriptor.bufferSize)
                if not chunk:
                    break

                plaintext_file.write(chunk)
    finally:
        self.uploaded_file['path'] = destination
def process_mail_creation(self, session, tid, data):
    """
    Build and enqueue a notification email for a receiver, enforcing the
    per-hour notification threshold, the tenant's plaintext-mail policy,
    and optional PGP encryption.
    """
    user_id = data['user']['id']

    # Do not spool emails if the receiver has opted out of ntfns for this tip.
    if not data['tip']['enable_notifications']:
        log.debug("Discarding emails for %s due to receiver's preference.", user_id)
        return

    # https://github.com/globaleaks/GlobaLeaks/issues/798
    # TODO: the current solution is global and configurable only by the admin
    sent_emails = self.state.get_mail_counter(user_id)
    if sent_emails >= self.state.tenant_cache[tid].notification.notification_threshold_per_hour:
        log.debug("Discarding emails for receiver %s due to threshold already exceeded for the current hour",
                  user_id)
        return

    self.state.increment_mail_counter(user_id)

    # NOTE(review): this condition repeats the one that returned above with
    # the same (unchanged) sent_emails value, so this branch looks
    # unreachable as written — possibly a missing "- 1"; verify upstream.
    if sent_emails >= self.state.tenant_cache[tid].notification.notification_threshold_per_hour:
        log.info("Reached threshold of %d emails with limit of %d for receiver %s",
                 sent_emails,
                 self.state.tenant_cache[tid].notification.notification_threshold_per_hour,
                 user_id,
                 tid=tid)

        # simply changing the type of the notification causes
        # to send the notification_limit_reached
        data['type'] = u'receiver_notification_limit_reached'

    data['node'] = self.serialize_config(session, 'node', tid, data['user']['language'])

    # Skip the mail entirely when plaintext mail is forbidden and the
    # receiver has no PGP key to encrypt with.
    if not data['node']['allow_unencrypted'] and len(data['user']['pgp_key_public']) == 0:
        return

    if data['node']['mode'] != u'whistleblowing.it':
        data['notification'] = self.serialize_config(session, 'notification', tid, data['user']['language'])
    else:
        data['notification'] = self.serialize_config(session, 'notification', 1, data['user']['language'])

    subject, body = Templating().get_mail_subject_and_body(data)

    # If the receiver has encryption enabled encrypt the mail body
    if data['user']['pgp_key_public']:
        pgpctx = PGPContext(self.state.settings.tmp_path)
        fingerprint = pgpctx.load_key(data['user']['pgp_key_public'])['fingerprint']
        body = pgpctx.encrypt_message(fingerprint, body)

    session.add(models.Mail({
        'address': data['user']['mail_address'],
        'subject': subject,
        'body': body,
        'tid': tid,
    }))
def operation(self):
    """
    Refresh the cached set of Tor exit-node addresses.
    Twisted inlineCallbacks-style generator.
    """
    net_agent = self.state.get_agent()
    log.debug('Fetching list of Tor exit nodes')

    yield State.tor_exit_set.update(net_agent)

    log.debug('Retrieved a list of %d exit nodes', len(State.tor_exit_set))
def db_create_submission(session, tid, request, uploaded_files, client_using_tor):
    """
    Persist a whistleblower submission: the InternalTip, its answers,
    the WhistleblowerTip (receipt), attached files and one ReceiverTip
    per eligible recipient. Returns {'receipt': <plaintext receipt>}.
    """
    answers = request['answers']

    # NOTE(review): one_or_none() returns a single None on no match, which
    # would fail this tuple unpacking before the `if not context` guard;
    # verify intended behavior upstream.
    context, questionnaire = session.query(models.Context, models.Questionnaire) \
                                    .filter(models.Context.id == request['context_id'],
                                            models.Questionnaire.id == models.Context.questionnaire_id,
                                            models.Questionnaire.tid.in_(set([1, tid]))).one_or_none()

    if not context:
        raise errors.ModelNotFound(models.Context)

    steps = db_get_questionnaire(session, tid, questionnaire.id, None)['steps']
    questionnaire_hash = text_type(sha256(json.dumps(steps)))

    # Snapshot the questionnaire schema so answers stay interpretable
    # even if the questionnaire is later edited
    db_archive_questionnaire_schema(session, steps, questionnaire_hash)

    submission = models.InternalTip()
    submission.tid = tid

    submission.status = db_get_id_for_system_status(session, tid, 'new')

    submission.progressive = db_assign_submission_progressive(session, tid)

    if context.tip_timetolive > 0:
        submission.expiration_date = get_expiration(context.tip_timetolive)
    else:
        submission.expiration_date = datetime_never()

    # this is get from the client as it the only possibility possible
    # that would fit with the end to end submission.
    # the score is only an indicator and not a critical information so we can accept to
    # be fooled by the malicious user.
    submission.total_score = request['total_score']

    # The status https is used to keep track of the security level adopted by the whistleblower
    submission.https = not client_using_tor

    submission.context_id = context.id

    submission.enable_two_way_comments = context.enable_two_way_comments
    submission.enable_two_way_messages = context.enable_two_way_messages
    submission.enable_attachments = context.enable_attachments

    whistleblower_identity = session.query(models.Field) \
                                    .filter(models.Field.template_id == u'whistleblower_identity',
                                            models.Field.step_id == models.Step.id,
                                            models.Step.questionnaire_id == context.questionnaire_id).one_or_none()

    submission.enable_whistleblower_identity = whistleblower_identity is not None

    if submission.enable_whistleblower_identity and request['identity_provided']:
        submission.identity_provided = True
        submission.identity_provided_date = datetime_now()

    submission.questionnaire_hash = questionnaire_hash
    submission.preview = extract_answers_preview(steps, answers)

    session.add(submission)
    session.flush()

    receipt = text_type(generateRandomReceipt())

    wbtip = models.WhistleblowerTip()
    wbtip.id = submission.id
    wbtip.tid = submission.tid
    # Only the hash of the receipt is stored; the plaintext is returned
    # to the whistleblower once
    wbtip.receipt_hash = hash_password(receipt, State.tenant_cache[tid].receipt_salt)
    session.add(wbtip)

    db_save_questionnaire_answers(session, tid, submission.id, answers)

    for filedesc in uploaded_files:
        new_file = models.InternalFile()
        new_file.tid = tid
        new_file.name = filedesc['name']
        new_file.description = ""
        new_file.content_type = filedesc['type']
        new_file.size = filedesc['size']
        new_file.internaltip_id = submission.id
        new_file.submission = filedesc['submission']
        new_file.filename = filedesc['filename']
        session.add(new_file)
        log.debug("=> file associated %s|%s (%d bytes)",
                  new_file.name, new_file.content_type, new_file.size)

    if context.maximum_selectable_receivers > 0 and \
            len(request['receivers']) > context.maximum_selectable_receivers:
        raise errors.InputValidationError("selected an invalid number of recipients")

    rtips_count = 0
    for receiver, user in session.query(models.Receiver, models.User) \
                                 .filter(models.Receiver.id.in_(request['receivers']),
                                         models.ReceiverContext.receiver_id == models.Receiver.id,
                                         models.ReceiverContext.context_id == context.id,
                                         models.User.id == models.Receiver.id,
                                         models.UserTenant.user_id == models.User.id,
                                         models.UserTenant.tenant_id == tid):
        # Recipients without a PGP key only receive the tip when the
        # tenant allows unencrypted delivery
        if user.pgp_key_public or State.tenant_cache[tid].allow_unencrypted:
            db_create_receivertip(session, receiver, submission)
            rtips_count += 1

    if rtips_count == 0:
        raise errors.InputValidationError("need at least one recipient")

    log.debug("The finalized submission had created %d models.ReceiverTip(s)", rtips_count)

    return {'receipt': receipt}
def childDataReceived(self, childFD, data):
    """Forward every non-empty output line of the child process to the debug log."""
    for line in filter(None, data.split('\n')):
        log.debug(line)
def access_wbfile(self, session, wbfile):
    """Record a whistleblower download of the given file and log it."""
    wbfile.downloads = wbfile.downloads + 1
    log.debug("Download of file %s by whistleblower %s",
              wbfile.id, self.current_user.user_id)
def db_create_submission(session, tid, request, token, client_using_tor):
    """
    Persist a whistleblower submission: the InternalTip, its (possibly
    encrypted) answers, the optional WhistleblowerTip/receipt, attached
    files and one ReceiverTip per eligible recipient.
    Returns {'receipt': <plaintext or ''>, 'score': <total_score>}.
    """
    if not request['receivers']:
        raise errors.InputValidationError("need at least one recipient")

    answers = request['answers']

    # NOTE(review): one_or_none() returns a single None on no match, which
    # would fail this tuple unpacking before the `if not context` guard;
    # verify intended behavior upstream.
    context, questionnaire = session.query(models.Context, models.Questionnaire) \
                                    .filter(models.Context.id == request['context_id'],
                                            models.Questionnaire.id == models.Context.questionnaire_id,
                                            models.Questionnaire.tid.in_(set([1, tid]))).one_or_none()

    if not context:
        raise errors.ModelNotFound(models.Context)

    steps = db_get_questionnaire(session, tid, questionnaire.id, None)['steps']
    questionnaire_hash = db_archive_questionnaire_schema(session, steps)

    itip = models.InternalTip()
    itip.tid = tid
    itip.status = db_get_id_for_system_status(session, tid, u'new')

    itip.progressive = db_assign_submission_progressive(session, tid)

    itip.additional_questionnaire_id = context.additional_questionnaire_id

    if context.tip_timetolive > 0:
        itip.expiration_date = get_expiration(context.tip_timetolive)

    # Evaluate the score level
    itip.total_score = request['total_score']

    # The status https is used to keep track of the security level adopted by the whistleblower
    itip.https = not client_using_tor

    itip.context_id = context.id
    itip.enable_two_way_comments = context.enable_two_way_comments
    itip.enable_two_way_messages = context.enable_two_way_messages
    itip.enable_attachments = context.enable_attachments

    # Look up the whistleblower-identity field of the questionnaire and
    # whether access to it is subject to custodian authorization
    x = session.query(models.Field, models.FieldAttr.value) \
               .filter(models.Field.template_id == u'whistleblower_identity',
                       models.Field.step_id == models.Step.id,
                       models.Step.questionnaire_id == context.questionnaire_id,
                       models.FieldAttr.field_id == models.Field.id,
                       models.FieldAttr.name == u'visibility_subject_to_authorization').one_or_none()

    whistleblower_identity = None
    can_access_whistleblower_identity = True

    if x:
        whistleblower_identity = x[0]
        can_access_whistleblower_identity = not x[1]

    itip.enable_whistleblower_identity = whistleblower_identity is not None

    itip.preview = extract_answers_preview(steps, answers)

    session.add(itip)
    session.flush()

    crypto_is_available = State.tenant_cache[1].encryption

    # Evaluate if encryption is available
    if crypto_is_available:
        # Encryption is usable only when every selected recipient has a key
        users_count = session.query(models.User) \
                             .filter(models.User.id.in_(request['receivers']),
                                     models.User.crypto_prv_key != b'').count()

        crypto_is_available = users_count == len(request['receivers'])

    if crypto_is_available:
        crypto_tip_prv_key, itip.crypto_tip_pub_key = GCE.generate_keypair()

    # Evaluate if the whistleblower tip should be generated
    if ((not context.enable_scoring_system) or
            (context.score_threshold_receipt == 0) or
            (context.score_threshold_receipt == 1 and itip.total_score >= 2) or
            (context.score_threshold_receipt == 2 and itip.total_score == 3)):
        receipt = GCE.generate_receipt()
        receipt_salt = State.tenant_cache[tid].receipt_salt

        wbtip = models.WhistleblowerTip()
        wbtip.id = itip.id
        wbtip.tid = tid
        wbtip.hash_alg = GCE.HASH
        wbtip.receipt_hash = GCE.hash_password(receipt, receipt_salt)

        # Evaluate if the whistleblower tip should be encrypted
        if crypto_is_available:
            crypto_tip_prv_key, itip.crypto_tip_pub_key = GCE.generate_keypair()
            wb_key = GCE.derive_key(receipt.encode(), receipt_salt)
            wb_prv_key, wb_pub_key = GCE.generate_keypair()
            wbtip.crypto_prv_key = GCE.symmetric_encrypt(wb_key, wb_prv_key)
            wbtip.crypto_pub_key = wb_pub_key
            wbtip.crypto_tip_prv_key = GCE.asymmetric_encrypt(wb_pub_key, crypto_tip_prv_key)

        session.add(wbtip)
    else:
        receipt = ''

    # Apply special handling to the whistleblower identity question
    if itip.enable_whistleblower_identity and request['identity_provided'] and answers[whistleblower_identity.id]:
        if crypto_is_available:
            wbi = base64.b64encode(
                GCE.asymmetric_encrypt(itip.crypto_tip_pub_key,
                                       json.dumps(answers[whistleblower_identity.id][0]).encode())).decode()
            answers[whistleblower_identity.id] = ''
        else:
            wbi = answers[whistleblower_identity.id][0]

        db_set_internaltip_data(session, itip.id, 'identity_provided', True, False)
        db_set_internaltip_data(session, itip.id, 'whistleblower_identity', wbi, crypto_is_available)

    if crypto_is_available:
        # Answers are stored only in encrypted form
        answers = base64.b64encode(
            GCE.asymmetric_encrypt(itip.crypto_tip_pub_key, json.dumps(answers).encode())).decode()
    else:
        db_save_questionnaire_answers(session, tid, itip.id, answers)

    db_set_internaltip_answers(session, itip.id, questionnaire_hash, answers, crypto_is_available)

    for filedesc in token.uploaded_files:
        new_file = models.InternalFile()
        new_file.tid = tid
        new_file.encrypted = crypto_is_available
        new_file.name = filedesc['name']
        new_file.description = ""
        new_file.content_type = filedesc['type']
        new_file.size = filedesc['size']
        new_file.internaltip_id = itip.id
        new_file.submission = filedesc['submission']
        new_file.filename = filedesc['filename']
        session.add(new_file)
        log.debug("=> file associated %s|%s (%d bytes)",
                  new_file.name, new_file.content_type, new_file.size)

    if context.maximum_selectable_receivers > 0 and \
            len(request['receivers']) > context.maximum_selectable_receivers:
        raise errors.InputValidationError("selected an invalid number of recipients")

    for user in session.query(models.User).filter(models.User.id.in_(request['receivers'])):
        # Recipients without a PGP key only receive the tip when the
        # tenant allows unencrypted delivery (or encryption is active)
        if not crypto_is_available and not user.pgp_key_public and not State.tenant_cache[tid].allow_unencrypted:
            continue

        _tip_key = b''
        if crypto_is_available:
            _tip_key = GCE.asymmetric_encrypt(user.crypto_pub_key, crypto_tip_prv_key)

        db_create_receivertip(session, user, itip, can_access_whistleblower_identity, _tip_key)

    return {'receipt': receipt, 'score': itip.total_score}
def sendmail(tid, smtp_host, smtp_port, security, authentication, username, password,
             from_name, from_address, to_address, subject, body, anonymize=True,
             socks_host='127.0.0.1', socks_port=9050):
    """
    Send an email using SMTPS/SMTP+TLS and maybe torify the connection.

    @param to_address: the 'To:' field of the email
    @param subject: the mail subject
    @param body: the mail body
    @return: a {Deferred} that returns a success {bool} if the message was
             passed to the server.
    """
    try:
        timeout = 30

        message = MIME_mail_build(from_name, from_address, to_address, to_address, subject, body)

        log.debug('Sending email to %s using SMTP server [%s:%d] [%s]',
                  to_address, smtp_host, smtp_port, security, tid=tid)

        context_factory = TLSClientContextFactory()

        smtp_deferred = defer.Deferred()

        factory = ESMTPSenderFactory(
            username.encode('utf-8') if authentication else None,
            password.encode('utf-8') if authentication else None,
            from_address,
            to_address,
            message,
            smtp_deferred,
            contextFactory=context_factory,
            requireAuthentication=authentication,
            requireTransportSecurity=(security == 'TLS'),
            retries=0,
            timeout=timeout)

        if security == "SSL":
            # Wrap the whole SMTP conversation in TLS from the first byte
            factory = tls.TLSMemoryBIOFactory(context_factory, True, factory)

        if anonymize:
            # Route the connection through the local Tor SOCKS proxy
            socksProxy = TCP4ClientEndpoint(reactor, socks_host, socks_port, timeout=timeout)
            endpoint = SOCKS5ClientEndpoint(smtp_host.encode('utf-8'), smtp_port, socksProxy)
        else:
            endpoint = TCP4ClientEndpoint(reactor, smtp_host.encode('utf-8'), smtp_port, timeout=timeout)

        conn_deferred = endpoint.connect(factory)

        # Fail fast if either the connection or the SMTP delivery errors
        final = defer.DeferredList([conn_deferred, smtp_deferred], fireOnOneErrback=True, consumeErrors=True)

        def failure_cb(failure):
            """
            @param failure {Failure {twisted.internet.FirstError {Failure}}}
            """
            log.err("SMTP connection failed (Exception: %s)", failure.value.subFailure.value, tid=tid)
            log.debug(failure)
            return False

        def success_cb(results):
            """
            @param results {list of (success, return_val) tuples}
            """
            return True

        return final.addCallbacks(success_cb, failure_cb)
    except Exception as excep:
        # avoids raising an exception inside email logic to avoid chained errors
        log.err("Unexpected exception in sendmail: %s", str(excep), tid=tid)
        return defer.succeed(False)
def receiverfile_planning(session):
    """
    This function roll over the InternalFile uploaded, extract a path, id and
    receivers associated, one entry for each combination. representing the
    ReceiverFile that need to be created.

    Returns a dict keyed by InternalFile id describing the ReceiverFile
    rows created and the metadata needed by the file processing step.
    """
    receiverfiles_maps = {}

    for ifile in session.query(models.InternalFile) \
                        .filter(models.InternalFile.new == True) \
                        .order_by(models.InternalFile.creation_date):
        if ifile.processing_attempts >= INTERNALFILES_HANDLE_RETRY_MAX:
            # Give up permanently on this file after too many failures
            ifile.new = False
            log.err("Failed to handle receiverfiles creation for ifile %s (%d retries)",
                    ifile.id, INTERNALFILES_HANDLE_RETRY_MAX)
            continue
        elif ifile.processing_attempts >= 1:
            log.err("Failed to handle receiverfiles creation for ifile %s (retry %d/%d)",
                    ifile.id, ifile.processing_attempts, INTERNALFILES_HANDLE_RETRY_MAX)

        if ifile.processing_attempts:
            log.debug("Starting handling receiverfiles creation for ifile %s retry %d/%d",
                      ifile.id, ifile.processing_attempts, INTERNALFILES_HANDLE_RETRY_MAX)

        ifile.processing_attempts += 1

        # One ReceiverFile per (file, receiver) combination
        for rtip, user in session.query(models.ReceiverTip, models.User) \
                                 .filter(models.ReceiverTip.internaltip_id == ifile.internaltip_id,
                                         models.User.id == models.ReceiverTip.receiver_id):
            receiverfile = models.ReceiverFile()
            receiverfile.internalfile_id = ifile.id
            receiverfile.receivertip_id = rtip.id
            receiverfile.filename = ifile.filename
            receiverfile.size = ifile.size
            receiverfile.status = u'processing'

            # https://github.com/globaleaks/GlobaLeaks/issues/444
            # avoid to mark the receiverfile as new if it is part of a submission
            # this way we avoid to send unuseful messages
            receiverfile.new = False if ifile.submission else True

            session.add(receiverfile)
            session.flush()

            if ifile.id not in receiverfiles_maps:
                receiverfiles_maps[ifile.id] = {
                    'plaintext_file_needed': False,
                    'ifile_id': ifile.id,
                    'ifile_name': ifile.filename,
                    'ifile_size': ifile.size,
                    'rfiles': [],
                    'tid': user.tid,
                }

            receiverfiles_maps[ifile.id]['rfiles'].append({
                'id': receiverfile.id,
                'status': u'processing',
                'filename': ifile.filename,
                'size': ifile.size,
                'receiver': {
                    'name': user.name,
                    'pgp_key_public': user.pgp_key_public,
                    'pgp_key_fingerprint': user.pgp_key_fingerprint,
                },
            })

    return receiverfiles_maps
def login(session, tid, username, password, authcode, client_using_tor, client_ip):
    """
    login returns a session

    :param session: an ORM session
    :param tid: the tenant id
    :param username: the username
    :param password: the plaintext password
    :param authcode: the optional two-factor auth code
    :param client_using_tor: whether the client connects over Tor
    :param client_ip: the client IP address
    :raises errors.InvalidAuthentication: on bad credentials
    :raises errors.TwoFactorAuthCodeRequired: when a 2FA code is needed
    :raises errors.InvalidTwoFactorAuthCode: on a wrong 2FA code
    """
    user = None

    users = session.query(User).filter(User.username == username,
                                       User.state != u'disabled',
                                       UserTenant.user_id == User.id,
                                       UserTenant.tenant_id == tid).distinct()
    for u in users:
        if GCE.check_password(u.hash_alg, password, u.salt, u.password):
            user = u
            break

        # Fix for issue: https://github.com/globaleaks/GlobaLeaks/issues/2563
        # Old installations stored the hash computed over the Python bytes
        # repr of the password hash (b'...'); retry with that wrapping.
        # (The original line here contained a garbled/unbalanced string
        # literal; restored to the bytes-repr wrapper.)
        if State.tenant_cache[1].creation_date < 1551740400:
            u_password = 'b\'' + u.password + '\''
            if GCE.check_password(u.hash_alg, password, u.salt, u_password):
                user = u
                break

    if user is None:
        log.debug("Login: Invalid credentials")
        Settings.failed_login_attempts += 1
        raise errors.InvalidAuthentication

    connection_check(client_ip, tid, user.role, client_using_tor)

    if State.tenant_cache[1].two_factor_auth and user.last_login != datetime_null():
        token = TwoFactorTokens.get(user.id)

        if token is not None and authcode != '':
            if token.token == authcode:
                TwoFactorTokens.revoke(user.id)
            else:
                raise errors.InvalidTwoFactorAuthCode
        elif token is None and authcode == '':
            # First step of the 2FA flow: generate a token, mail it to the
            # user and ask the client to re-submit with the code.
            token = TwoFactorTokens.new(user.id)

            data = {'type': '2fa', 'authcode': str(token.token)}
            data['node'] = db_admin_serialize_node(session, tid, user.language)
            data['notification'] = db_get_notification(session, tid, user.language)

            subject, body = Templating().get_mail_subject_and_body(data)

            State.sendmail(1, user.mail_address, subject, body)
            raise errors.TwoFactorAuthCodeRequired
        else:
            raise errors.TwoFactorAuthCodeRequired

    user.last_login = datetime_now()

    crypto_prv_key = ''
    if State.tenant_cache[1].encryption and user.crypto_prv_key:
        # Derive the user key from the password and unlock the user's
        # private key for this session.
        user_key = GCE.derive_key(password.encode('utf-8'), user.salt)
        crypto_prv_key = GCE.symmetric_decrypt(user_key, user.crypto_prv_key)

    return Sessions.new(tid, user.id, user.role, user.password_change_needed, crypto_prv_key)
def post(self, tip_id):
    """
    Attach an uploaded file to a whistleblower tip.
    Twisted inlineCallbacks-style generator.
    """
    yield self.can_perform_action(self.request.tid, tip_id, self.uploaded_file['name'])

    yield register_wbfile_on_db(self.request.tid, tip_id, self.uploaded_file)

    log.debug("Recorded new WhistleblowerFile %s", self.uploaded_file['name'])
def validate_jmessage(jmessage, message_template):
    """
    Validate a decoded JSON message against a message template.

    The message must be either a dict or a list. This function may be
    called recursively to validate sub-parameters that are also GLType.

    :param jmessage: the decoded JSON message to be validated
    :param message_template: the GLType template it should match
    :return: True when the message validates
    :raises errors.InputValidationError: when the message does not match
    """
    if isinstance(message_template, dict):
        success_check = 0
        keys_to_strip = []
        for key, value in jmessage.items():
            if key not in message_template:
                # strip whatever is not validated
                #
                # reminder: it's not possible to raise an exception
                # in case more values are present because it's normal that the
                # client will send automatically more data.
                #
                # e.g. the client will always send 'creation_date' attributes of
                # objects and attributes like this are present generally only
                # from the second request on.
                keys_to_strip.append(key)
                continue

            if not BaseHandler.validate_type(value, message_template[key]):
                log.err("Received key %s: type validation fail", key)
                raise errors.InputValidationError("Key (%s) type validation failure" % key)

            success_check += 1

        for key in keys_to_strip:
            del jmessage[key]

        for key, value in message_template.items():
            if key not in jmessage:
                log.debug("Key %s expected but missing!", key)
                log.debug("Received schema %s - Expected %s",
                          jmessage.keys(), message_template.keys())
                raise errors.InputValidationError("Missing key %s" % key)

            if not BaseHandler.validate_type(jmessage[key], value):
                log.err("Expected key: %s type validation failure", key)
                raise errors.InputValidationError("Key (%s) double validation failure" % key)

            # Recurse into non-empty nested templates (dicts or lists).
            if isinstance(message_template[key], (dict, list)) and message_template[key]:
                BaseHandler.validate_jmessage(jmessage[key], message_template[key])

            success_check += 1

        # Every template key must have been counted twice: once while
        # scanning the message, once while scanning the template.
        if success_check != len(message_template) * 2:
            log.err("Success counter double check failure: %d", success_check)
            raise errors.InputValidationError("Success counter double check failure")

        return True

    elif isinstance(message_template, list):
        if not all(BaseHandler.validate_type(x, message_template[0]) for x in jmessage):
            raise errors.InputValidationError("Not every element in %s is %s" %
                                              (jmessage, message_template[0]))
        return True

    else:
        raise errors.InputValidationError("invalid json message: expected dict or list")
def validate_jmessage(jmessage, message_template):
    """
    Validate a decoded JSON message against a message template.

    The message must be either a dict or a list. This function may be
    called recursively to validate sub-parameters that are also GLType.

    :param jmessage: the decoded JSON message to be validated
    :param message_template: the GLType template it should match
    :return: True when the message validates
    :raises errors.InputValidationError: when the message does not match
    """
    if isinstance(message_template, dict):
        success_check = 0
        keys_to_strip = []
        for key, value in jmessage.items():
            if key not in message_template:
                # strip whatever is not validated
                #
                # reminder: it's not possible to raise an exception
                # in case more values are present because it's normal that the
                # client will send automatically more data.
                #
                # e.g. the client will always send 'creation_date' attributes of
                # objects and attributes like this are present generally only
                # from the second request on.
                keys_to_strip.append(key)
                continue

            if not BaseHandler.validate_type(value, message_template[key]):
                log.err("Received key %s: type validation fail", key)
                raise errors.InputValidationError("Key (%s) type validation failure" % key)

            success_check += 1

        for key in keys_to_strip:
            del jmessage[key]

        for key, value in message_template.items():
            if key not in jmessage:
                log.debug("Key %s expected but missing!", key)
                log.debug("Received schema %s - Expected %s",
                          jmessage.keys(), message_template.keys())
                raise errors.InputValidationError("Missing key %s" % key)

            if not BaseHandler.validate_type(jmessage[key], value):
                log.err("Expected key: %s type validation failure", key)
                raise errors.InputValidationError("Key (%s) double validation failure" % key)

            # Recurse into non-empty nested templates (dicts or lists).
            if isinstance(message_template[key], (dict, list)) and message_template[key]:
                BaseHandler.validate_jmessage(jmessage[key], message_template[key])

            success_check += 1

        # Every template key must have been counted twice: once while
        # scanning the message, once while scanning the template.
        if success_check != len(message_template) * 2:
            log.err("Success counter double check failure: %d", success_check)
            raise errors.InputValidationError("Success counter double check failure")

        return True

    elif isinstance(message_template, list):
        if not all(BaseHandler.validate_type(x, message_template[0]) for x in jmessage):
            raise errors.InputValidationError("Not every element in %s is %s" %
                                              (jmessage, message_template[0]))
        return True

    else:
        raise errors.InputValidationError("invalid json message: expected dict or list")
def perform_file_action(cls, tid):
    """Generate a fresh RSA key for HTTPS and persist it for the given tenant."""
    bits = Settings.key_bits

    log.info("Generating the HTTPS key with %d bits" % bits)
    # Key generation is heavy work; deferToThread keeps it off the reactor thread.
    new_key = yield deferToThread(tls.gen_rsa_key, bits)

    log.debug("Saving the HTTPS key")
    yield cls.save_tls_key(tid, new_key)
def db_create_submission(session, tid, request, token, client_using_tor):
    """
    Create a new submission: the InternalTip, the optional WhistleblowerTip,
    the archived answers, the uploaded files and the receiver tips.

    :param session: An ORM session
    :param tid: The tenant ID on which the submission is filed
    :param request: The validated submission request payload
    :param token: The submission token carrying the uploaded files
    :param client_using_tor: Whether the whistleblower connected over Tor
    :return: A dict with the generated 'receipt' ('' when no receipt is
             issued) and the submission 'score'
    """
    if not request['receivers']:
        raise errors.InputValidationError("need at least one recipient")

    answers = request['answers']

    # The questionnaire may belong to the root tenant (1) or to this tenant.
    context, questionnaire = session.query(models.Context, models.Questionnaire) \
                                    .filter(models.Context.id == request['context_id'],
                                            models.Questionnaire.id == models.Context.questionnaire_id,
                                            models.Questionnaire.tid.in_(set([1, tid]))).one_or_none()
    if not context:
        raise errors.ModelNotFound(models.Context)

    # Archive the questionnaire schema so the answers stay interpretable
    # even if the questionnaire is later modified.
    steps = db_get_questionnaire(session, tid, questionnaire.id, None)['steps']
    questionnaire_hash = db_archive_questionnaire_schema(session, steps)

    itip = models.InternalTip()
    itip.tid = tid
    itip.status = db_get_id_for_system_status(session, tid, u'new')
    itip.progressive = db_assign_submission_progressive(session, tid)
    itip.additional_questionnaire_id = context.additional_questionnaire_id

    if context.tip_timetolive > 0:
        itip.expiration_date = get_expiration(context.tip_timetolive)

    # Evaluate the score level
    itip.total_score = request['total_score']

    # The status https is used to keep track of the security level adopted by the whistleblower
    itip.https = not client_using_tor

    itip.context_id = context.id
    itip.enable_two_way_comments = context.enable_two_way_comments
    itip.enable_two_way_messages = context.enable_two_way_messages
    itip.enable_attachments = context.enable_attachments

    # Look up the whistleblower-identity field of this questionnaire (if any)
    # together with its 'visibility_subject_to_authorization' attribute.
    x = session.query(models.Field, models.FieldAttr.value) \
               .filter(models.Field.template_id == u'whistleblower_identity',
                       models.Field.step_id == models.Step.id,
                       models.Step.questionnaire_id == context.questionnaire_id,
                       models.FieldAttr.field_id == models.Field.id,
                       models.FieldAttr.name == u'visibility_subject_to_authorization').one_or_none()

    whistleblower_identity = None
    can_access_whistleblower_identity = True

    if x:
        whistleblower_identity = x[0]
        # When visibility is subject to authorization, receivers cannot
        # access the identity by default.
        can_access_whistleblower_identity = not x[1]

    itip.enable_whistleblower_identity = whistleblower_identity is not None

    itip.preview = extract_answers_preview(steps, answers)

    session.add(itip)
    session.flush()

    crypto_is_available = State.tenant_cache[1].encryption

    # Evaluate if encryption is available
    # Encryption can only be used when every selected receiver has a key.
    if crypto_is_available:
        users_count = session.query(models.User) \
                             .filter(models.User.id.in_(request['receivers']),
                                     models.User.crypto_prv_key != b'').count()

        crypto_is_available = users_count == len(request['receivers'])

    if crypto_is_available:
        crypto_tip_prv_key, itip.crypto_tip_pub_key = GCE.generate_keypair()

    # Evaluate if the whistleblower tip should be generated
    if ((not context.enable_scoring_system) or
        (context.score_threshold_receipt == 0) or
        (context.score_threshold_receipt == 1 and itip.total_score >= 2) or
        (context.score_threshold_receipt == 2 and itip.total_score == 3)):

        receipt = GCE.generate_receipt()
        receipt_salt = State.tenant_cache[tid].receipt_salt
        wbtip = models.WhistleblowerTip()
        wbtip.id = itip.id
        wbtip.tid = tid
        wbtip.hash_alg = GCE.HASH
        wbtip.receipt_hash = GCE.hash_password(receipt, receipt_salt)

        # Evaluate if the whistleblower tip should be encrypted
        if crypto_is_available:
            # NOTE(review): this regenerates the tip keypair already created
            # above, discarding the first one — looks redundant; confirm
            # whether a single generation is intended.
            crypto_tip_prv_key, itip.crypto_tip_pub_key = GCE.generate_keypair()
            # The whistleblower's private key is locked with a key derived
            # from the receipt, so only the receipt holder can unlock it.
            wb_key = GCE.derive_key(receipt.encode(), receipt_salt)
            wb_prv_key, wb_pub_key = GCE.generate_keypair()
            wbtip.crypto_prv_key = GCE.symmetric_encrypt(wb_key, wb_prv_key)
            wbtip.crypto_pub_key = wb_pub_key
            wbtip.crypto_tip_prv_key = GCE.asymmetric_encrypt(wb_pub_key, crypto_tip_prv_key)

        session.add(wbtip)
    else:
        receipt = ''

    # Apply special handling to the whistleblower identity question
    if itip.enable_whistleblower_identity and request['identity_provided'] and answers[whistleblower_identity.id]:
        if crypto_is_available:
            # Encrypt the identity answer separately and blank it from the
            # generic answers blob.
            wbi = base64.b64encode(GCE.asymmetric_encrypt(itip.crypto_tip_pub_key, json.dumps(answers[whistleblower_identity.id][0]).encode())).decode()
            answers[whistleblower_identity.id] = ''
        else:
            wbi = answers[whistleblower_identity.id][0]

        db_set_internaltip_data(session, itip.id, 'identity_provided', True, False)
        db_set_internaltip_data(session, itip.id, 'whistleblower_identity', wbi, crypto_is_available)

    # Store the answers: encrypted blob when crypto is available, otherwise
    # saved in clear in the questionnaire-answers tables.
    if crypto_is_available:
        answers = base64.b64encode(GCE.asymmetric_encrypt(itip.crypto_tip_pub_key, json.dumps(answers).encode())).decode()
    else:
        db_save_questionnaire_answers(session, tid, itip.id, answers)

    db_set_internaltip_answers(session, itip.id, questionnaire_hash, answers, crypto_is_available)

    # Register the files uploaded through the submission token.
    for filedesc in token.uploaded_files:
        new_file = models.InternalFile()
        new_file.tid = tid
        new_file.encrypted = crypto_is_available
        new_file.name = filedesc['name']
        new_file.description = ""
        new_file.content_type = filedesc['type']
        new_file.size = filedesc['size']
        new_file.internaltip_id = itip.id
        new_file.submission = filedesc['submission']
        new_file.filename = filedesc['filename']
        session.add(new_file)
        log.debug("=> file associated %s|%s (%d bytes)",
                  new_file.name, new_file.content_type, new_file.size)

    if context.maximum_selectable_receivers > 0 and \
            len(request['receivers']) > context.maximum_selectable_receivers:
        raise errors.InputValidationError("selected an invalid number of recipients")

    for user in session.query(models.User).filter(models.User.id.in_(request['receivers'])):
        # Skip receivers that would get plaintext data when the tenant
        # forbids unencrypted delivery.
        if not crypto_is_available and not user.pgp_key_public and not State.tenant_cache[tid].allow_unencrypted:
            continue

        _tip_key = b''
        if crypto_is_available:
            # Share the tip private key with each receiver by encrypting it
            # to the receiver's public key.
            _tip_key = GCE.asymmetric_encrypt(user.crypto_pub_key, crypto_tip_prv_key)

        db_create_receivertip(session, user, itip, can_access_whistleblower_identity, _tip_key)

    return {
        'receipt': receipt,
        'score': itip.total_score
    }
def fail_startup(excep):
    """
    Log a fatal startup failure and stop the reactor.

    :param excep: the exception that aborted startup
    """
    log.err("ERROR: Cannot start GlobaLeaks. Please manually examine the exception.")
    log.err("EXCEPTION: %s", excep)
    # traceback.format_exc() takes an int `limit`, not an exception object:
    # passing `excep` raises a TypeError on Python 3. Format the exception's
    # own traceback explicitly instead.
    log.debug('TRACE: %s', ''.join(traceback.format_exception(type(excep),
                                                              excep,
                                                              getattr(excep, '__traceback__', None))))

    if reactor.running:
        reactor.stop()
def sendmail(tid, smtp_host, smtp_port, security, authentication, username, password, from_name, from_address, to_address, subject, body, anonymize=True, socks_host='127.0.0.1', socks_port=9050):
    """
    Send an email using SMTPS/SMTP+TLS and maybe torify the connection.

    :param tid: A tenant id
    :param smtp_host: A SMTP host
    :param smtp_port: A SMTP port
    :param security: A type of security to be applied (SMTPS/SMTP+TLS)
    :param authentication: A boolean to enable authentication
    :param username: A mail account username
    :param password: A mail account password
    :param from_name: A from name
    :param from_address: A from address
    :param to_address: The to address
    :param subject: A mail subject
    :param body: A mail body
    :param anonymize: A boolean to enable anonymous mail connection
    :param socks_host: A socks host to be used for the mail connection
    :param socks_port: A socks port to be used for the mail connection
    :return: A deferred resource resolving at the end of the connection
    """
    try:
        timeout = 30

        message = MIME_mail_build(from_name, from_address, to_address, to_address, subject, body)

        log.debug('Sending email to %s using SMTP server [%s:%d] [%s]',
                  to_address, smtp_host, smtp_port, security, tid=tid)

        context_factory = TLSClientContextFactory()

        # Fired by the ESMTP factory when the SMTP dialogue completes or fails.
        smtp_deferred = defer.Deferred()

        factory = ESMTPSenderFactory(
            username.encode() if authentication else None,
            password.encode() if authentication else None,
            from_address,
            to_address,
            message,
            smtp_deferred,
            contextFactory=context_factory,
            requireAuthentication=authentication,
            requireTransportSecurity=(security == 'TLS'),
            retries=0,
            timeout=timeout)

        # SMTPS: wrap the whole connection in TLS from the first byte.
        if security == "SSL":
            factory = tls.TLSMemoryBIOFactory(context_factory, True, factory)

        # Route the connection through the local Tor SOCKS proxy unless
        # anonymization has been explicitly disabled.
        if anonymize:
            socksProxy = TCP4ClientEndpoint(reactor, socks_host, socks_port, timeout=timeout)
            endpoint = SOCKS5ClientEndpoint(smtp_host, smtp_port, socksProxy)
        else:
            endpoint = TCP4ClientEndpoint(reactor, smtp_host, smtp_port, timeout=timeout)

        conn_deferred = endpoint.connect(factory)

        # Wait on both the TCP/SOCKS connection and the SMTP exchange;
        # fireOnOneErrback makes any single failure fail the whole send.
        final = defer.DeferredList([conn_deferred, smtp_deferred], fireOnOneErrback=True, consumeErrors=True)

        def failure_cb(failure):
            """
            :param failure {Failure {twisted.internet.FirstError {Failure}}}
            """
            log.err("SMTP connection failed (Exception: %s)",
                    failure.value.subFailure.value, tid=tid)
            return False

        def success_cb(results):
            """
            :param results {list of (success, return_val) tuples}
            """
            return True

        return final.addCallbacks(success_cb, failure_cb)
    except Exception as e:
        # avoids raising an exception inside email logic to avoid chained errors
        log.err("Unexpected exception in sendmail: %s", e, tid=tid)
        return defer.succeed(False)
def login(session, tid, username, password, authcode, client_using_tor, client_ip):
    """
    Login transaction returning a user session on success.

    :param session: An ORM session
    :param tid: The tenant ID on which the login is attempted
    :param username: The provided username
    :param password: The provided password
    :param authcode: The provided two factor authentication code ('' if none)
    :param client_using_tor: Whether the client is connecting over Tor
    :param client_ip: The client IP address
    :return: A new session for the authenticated user
    :raises errors.InvalidAuthentication: when the credentials are invalid
    :raises errors.TwoFactorAuthCodeRequired: when a 2FA code must be provided
    :raises errors.InvalidTwoFactorAuthCode: when the provided 2FA code is wrong
    """
    user = None

    users = session.query(User).filter(User.username == username,
                                       User.state != u'disabled',
                                       UserTenant.user_id == User.id,
                                       UserTenant.tenant_id == tid).distinct()
    for u in users:
        if GCE.check_password(u.hash_alg, password, u.salt, u.password):
            user = u
            break

        # Fix for issue: https://github.com/globaleaks/GlobaLeaks/issues/2563
        # Some old installations hashed the str() of a bytes object, i.e. the
        # password wrapped in b'...'; retry with that wrapping for tenants
        # created before the fix was released (2019-05-03).
        if State.tenant_cache[1].creation_date < datetime.timestamp(datetime(2019, 5, 3, 0, 0)):
            u_password = 'b\'' + u.password + '\''
            if GCE.check_password(u.hash_alg, password, u.salt, u_password):
                user = u
                break

    if user is None:
        log.debug("Login: Invalid credentials")
        Settings.failed_login_attempts += 1
        raise errors.InvalidAuthentication

    connection_check(client_ip, tid, user.role, client_using_tor)

    # 2FA is skipped on the very first login (last_login still null) so the
    # user has a chance to receive/configure the token.
    if State.tenant_cache[1].two_factor_auth and user.last_login != datetime_null():
        token = TwoFactorTokens.get(user.id)

        if token is not None and authcode != '':
            if token.token == authcode:
                TwoFactorTokens.revoke(user.id)
            else:
                raise errors.InvalidTwoFactorAuthCode
        elif token is None and authcode == '':
            # First attempt of this login: generate a token, mail it to the
            # user and ask the client to retry with the authcode.
            token = TwoFactorTokens.new(user.id)

            data = {
                'type': '2fa',
                'authcode': str(token.token)
            }

            data['node'] = db_admin_serialize_node(session, tid, user.language)
            data['notification'] = db_get_notification(session, tid, user.language)

            subject, body = Templating().get_mail_subject_and_body(data)

            State.sendmail(1, user.mail_address, subject, body)

            raise errors.TwoFactorAuthCodeRequired
        else:
            raise errors.TwoFactorAuthCodeRequired

    user.last_login = datetime_now()

    # Unlock the user's private key only when encryption is enabled and the
    # user actually has one; the key is derived from the login password.
    crypto_prv_key = ''
    if State.tenant_cache[1].encryption and user.crypto_prv_key:
        user_key = GCE.derive_key(password.encode('utf-8'), user.salt)
        crypto_prv_key = GCE.symmetric_decrypt(user_key, user.crypto_prv_key)

    return Sessions.new(tid, user.id, user.role, user.password_change_needed, crypto_prv_key)
def login(session, tid, username, password, authcode, client_using_tor, client_ip):
    """
    Login transaction for users' access

    :param session: An ORM session
    :param tid: A tenant ID
    :param username: A provided username
    :param password: A provided password
    :param authcode: A provided authcode
    :param client_using_tor: A boolean signaling Tor usage
    :param client_ip: The client IP
    :return: Returns a user session in case of success
    :raises errors.InvalidAuthentication: when the credentials are invalid
    :raises errors.TwoFactorAuthCodeRequired: when a 2FA code must be provided
    :raises errors.InvalidTwoFactorAuthCode: when the provided 2FA code is wrong
    """
    user = None
    for u in session.query(User).filter(User.username == username,
                                        User.state == 'enabled',
                                        User.tid == tid):
        if GCE.check_password(u.hash_alg, password, u.salt, u.password):
            user = u
            break

        # Fix for issue: https://github.com/globaleaks/GlobaLeaks/issues/2563
        # Some old installations hashed the str() of a bytes object, i.e. the
        # password wrapped in b'...'; retry with that wrapping for tenants
        # created before the fix was released (timestamp 2019-03-05).
        if State.tenant_cache[1].creation_date < 1551740400:
            u_password = 'b\'' + u.password + '\''
            if GCE.check_password(u.hash_alg, password, u.salt, u_password):
                user = u
                break

    if user is None:
        log.debug("Login: Invalid credentials")
        Settings.failed_login_attempts += 1
        raise errors.InvalidAuthentication

    connection_check(tid, client_ip, user.role, client_using_tor)

    crypto_prv_key = ''
    if user.crypto_prv_key:
        user_key = GCE.derive_key(password.encode(), user.salt)
        crypto_prv_key = GCE.symmetric_decrypt(user_key, Base64Encoder.decode(user.crypto_prv_key))
    elif State.tenant_cache[tid].encryption:
        # Force the password change on which the user key will be created
        user.password_change_needed = True

    # Require password change if password change threshold is exceeded
    if State.tenant_cache[tid].password_change_period > 0 and \
            user.password_change_date < datetime_now() - timedelta(days=State.tenant_cache[tid].password_change_period):
        user.password_change_needed = True

    if user.two_factor_enable:
        if authcode != '':
            # RFC 6238: step size 30 sec; valid_window = 1; total size of the window: 1.30 sec
            if not pyotp.TOTP(user.two_factor_secret).verify(authcode, valid_window=1):
                raise errors.InvalidTwoFactorAuthCode
        else:
            raise errors.TwoFactorAuthCodeRequired

    user.last_login = datetime_now()

    return Sessions.new(tid, user.id, user.tid, user.role,
                        user.password_change_needed, user.two_factor_enable,
                        crypto_prv_key, user.crypto_escrow_prv_key)
def process_files(state, receiverfiles_maps):
    """
    Materialize per-receiver file variants on the filesystem: a PGP-encrypted
    copy for receivers with a key, a shared plaintext copy when the tenant
    allows it, or a 'nokey' marker otherwise.

    @param receiverfiles_maps: the mapping of ifile/rfiles to be created on filesystem
    @return: return None
    """
    for ifile_id, receiverfiles_map in receiverfiles_maps.items():
        ifile_name = receiverfiles_map['ifile_name']
        plain_name = "%s.plain" % ifile_name.split('.')[0]
        plain_path = os.path.abspath(os.path.join(Settings.attachments_path, plain_name))

        sf = state.get_tmp_file_by_name(ifile_name)

        receiverfiles_map['plaintext_file_needed'] = False
        for rcounter, rfileinfo in enumerate(receiverfiles_map['rfiles']):
            if rfileinfo['receiver']['pgp_key_public']:
                # Receiver has a PGP key: produce an individually encrypted copy.
                try:
                    new_filename, new_size = fsops_pgp_encrypt(state, sf,
                                                               rfileinfo['receiver']['pgp_key_public'],
                                                               rfileinfo['receiver']['pgp_key_fingerprint'])

                    log.debug("%d# Switch on Receiver File for %s filename %s => %s size %d => %d",
                              rcounter, rfileinfo['receiver']['name'], rfileinfo['filename'],
                              new_filename, rfileinfo['size'], new_size)

                    rfileinfo['filename'] = new_filename
                    rfileinfo['size'] = new_size
                    rfileinfo['status'] = u'encrypted'
                except Exception as excep:
                    # Best-effort: a failed encryption marks only this
                    # receiver's copy as unavailable instead of aborting.
                    log.err("%d# Unable to complete PGP encrypt for %s on %s: %s. marking the file as unavailable.",
                            rcounter, rfileinfo['receiver']['name'], rfileinfo['filename'], excep)
                    rfileinfo['status'] = u'unavailable'
            elif state.tenant_cache[receiverfiles_map['tid']].allow_unencrypted:
                # No PGP key but plaintext delivery is allowed: all such
                # receivers reference one shared plaintext copy.
                receiverfiles_map['plaintext_file_needed'] = True
                rfileinfo['filename'] = plain_name
                rfileinfo['status'] = u'reference'
            else:
                rfileinfo['status'] = u'nokey'

        if receiverfiles_map['plaintext_file_needed']:
            log.debug("Not all receivers support PGP and the system allows plaintext version of files: %s saved as plaintext file %s",
                      ifile_name, plain_name)

            try:
                # Stream-copy the temporary file into the plaintext copy in
                # 4 KiB chunks.
                with sf.open('rb') as encrypted_file, open(plain_path, "a+b") as plaintext_file:
                    while True:
                        chunk = encrypted_file.read(4096)
                        if not chunk:
                            break

                        plaintext_file.write(chunk)

                receiverfiles_map['ifile_name'] = plain_name
            except Exception as excep:
                log.err("Unable to create plaintext file %s: %s", plain_path, excep)
        else:
            log.debug("All receivers support PGP or the system denies plaintext version of files: marking internalfile as removed")