Example #1
0
    def encrypt_file(self, key_fingerprint, plainpath, filestream, output_path):
        """
        Encrypt a file stream for the given PGP key and write the armored
        output to a randomly named file inside output_path.

        @param key_fingerprint: fingerprint of the public key to encrypt for
        @param plainpath: original path of the plaintext (used for logging only)
        @param filestream: an open file-like object with the plaintext data
        @param output_path: directory where the encrypted file is written
        @return: tuple (encrypted_path, encrypted_length)
        @raise errors.PGPKeyInvalid: if the encryption engine reports failure
        @raise errors.InternalServerError: if the output file cannot be written
        """
        encrypt_obj = self.pgph.encrypt_file(filestream, str(key_fingerprint))

        if not encrypt_obj.ok:
            raise errors.PGPKeyInvalid

        # render the armored ciphertext once instead of on every use
        encrypted_data = str(encrypt_obj)

        log.debug("Encrypting for key %s file %s (%d bytes)" %
                  (key_fingerprint,
                   plainpath, len(encrypted_data)))

        encrypted_path = os.path.join(os.path.abspath(output_path),
                                      "pgp_encrypted-%s" % rstr.xeger(r'[A-Za-z0-9]{16}'))

        try:
            with open(encrypted_path, "w+") as f:
                f.write(encrypted_data)

            return encrypted_path, len(encrypted_data)
        except Exception as excep:
            log.err("Error in writing PGP file output: %s (%s) bytes %d" %
                    (excep.message, encrypted_path, len(encrypted_data)))
            raise errors.InternalServerError("Error in writing [%s]" % excep.message)
Example #2
0
def longlocal_v(_self, attr, value):
    """
    Validate a dict containing a localized long text (a dictionary having as
    a key one of the supported languages).
    """
    dict_v(None, attr, value)

    if not value:
        return value

    # If a language does not exist, it does not mean that a malicious input
    # has been provided: this condition may in fact happen when a language is
    # removed from the package, so the proper way to handle it is simply to
    # log the issue and discard the input.
    # https://github.com/globaleaks/GlobaLeaks/issues/879
    remove = [lang for lang in value if lang not in LANGUAGES_SUPPORTED_CODES]
    for k in remove:
        try:
            # Bugfix: the original deleted value[unicode(lang)] and logged
            # 'lang' — the leaked (last) comprehension variable — instead of
            # the invalid code 'k' actually being discarded.
            del value[unicode(k)]
        except KeyError:
            pass
        log.debug("(%s) Invalid language code in %s, ignoring it" % (k, attr))

    for lang, text in value.iteritems():
        longtext_v(None, attr, text)

    return value
Example #3
0
    def encrypt_message(self, key_fingerprint, plaintext):
        """
        Encrypt an in-memory text for the given PGP key.

        @param key_fingerprint: fingerprint of the public key to encrypt for;
            may also be a list of fingerprints to encrypt for multiple keys.
        @param plaintext: an arbitrarily long text to be encrypted
        @return: the armored (ASCII) encrypted output
        @raise errors.PGPKeyInvalid: if the encryption engine reports failure
        """
        # This second argument may be a list of fingerprint, not just one
        encrypt_obj = self.pgph.encrypt(plaintext, str(key_fingerprint))

        if not encrypt_obj.ok:
            raise errors.PGPKeyInvalid

        # render the armored output once and reuse it
        output = str(encrypt_obj)

        log.debug("Encrypting for key %s %d byte of plain data (%d cipher output)" %
                  (key_fingerprint,
                   len(plaintext), len(output)))

        return output
Example #4
0
    def operation():
        """
        Clean up expired data.

        First: check every submission not yet finalized and, once the
        expiration time set in the context has been reached, remove the
        submission_gus together with its fields and, if present, the
        uploaded folder/files.

        Second: check every InternalTip expiration date and, on match,
        remove the tip with all its folders and comments.
        """
        try:
            unfinished = yield get_tiptime_by_marker(InternalTip._marker[0])  # Submission
            log.debug("(Cleaning routines) %d unfinished Submission are check if expired" % len(unfinished))
            for sub in unfinished:
                if not is_expired(iso2dateobj(sub["creation_date"]),
                                  seconds=sub["submission_life_seconds"]):
                    continue
                log.info(
                    "Deleting an unfinalized Submission (creation date: %s) files %d"
                    % (sub["creation_date"], sub["files"])
                )
                yield itip_cleaning(sub["id"])

            stored_tips = yield get_tiptime_by_marker(InternalTip._marker[2])  # First
            log.debug("(Cleaning routines) %d Tips stored are check if expired" % len(stored_tips))
            for stored in stored_tips:
                if not is_expired(iso2dateobj(stored["creation_date"]),
                                  seconds=stored["tip_life_seconds"]):
                    continue
                log.info(
                    "Deleting an expired Tip (creation date: %s) files %d comments %d"
                    % (stored["creation_date"], stored["files"], stored["comments"])
                )
                yield itip_cleaning(stored["id"])

        except Exception as excep:
            log.err("Exception failure in submission/tip cleaning routine (%s)" % excep.message)
            sys.excepthook(*sys.exc_info())
Example #5
0
def assign_rtip_label(store, user_id, tip_id, label_content):
    """
    Set (or replace) the label of the ReceiverTip accessible to the user.
    """
    rtip = db_access_tip(store, user_id, tip_id)

    if not rtip.label:
        log.debug("Assigning ReceiverTip label '%s'" % label_content)
    else:
        log.debug("Updating ReceiverTip label from '%s' to '%s'" % (rtip.label, label_content))

    rtip.label = unicode(label_content)
Example #6
0
    def post(self, lang):
        """
        Upload a custom language file.

        Stores the uploaded translation for the given language code under the
        l10n static directory and replies 201 with the file descriptor.
        """
        uploaded_file = self.get_file_upload()

        # nothing was uploaded: reply as created and bail out early
        if uploaded_file is None:
            self.set_status(201)
            self.finish()
            return

        path = self.custom_langfile_path(lang)
        directory_traversal_check(GLSettings.static_path_l10n, path)

        try:
            dumped_file = yield threads.deferToThread(dump_static_file, uploaded_file, path)
        except OSError as oserr:
            log.err("OSError while create a new custom lang file [%s]: %s" % (path, oserr))
            raise errors.InternalServerError(oserr.strerror)
        except Exception as unexpected:
            log.err("Unexpected exception: %s" % unexpected)
            raise errors.InternalServerError(unexpected)

        log.debug("Admin uploaded new lang file: %s" % dumped_file['filename'])

        self.set_status(201)  # Created
        self.finish(dumped_file)
Example #7
0
    def create_key(self):
        """
        Create the AES key used to encrypt an uploaded file and persist it,
        together with its counter nonce, on the ramdisk.

        The key id / keypath pair is regenerated until it does not collide
        with an existing keyfile.
        """
        self.key = os.urandom(GLSettings.AES_key_size)

        # pick a random key id whose keyfile does not exist yet
        # (deduplicated: the original repeated the generation code twice)
        while True:
            self.key_id = rstr.xeger(GLSettings.AES_key_id_regexp)
            self.keypath = os.path.join(GLSettings.ramdisk_path, "%s%s" %
                                        (GLSettings.AES_keyfile_prefix, self.key_id))
            if not os.path.isfile(self.keypath):
                break

        self.key_counter_nonce = os.urandom(GLSettings.AES_counter_nonce)
        self.initialize_cipher()

        saved_struct = {
            'key': self.key,
            'key_counter_nonce': self.key_counter_nonce
        }

        log.debug("Key initialization at %s" % self.keypath)

        # NOTE(review): the keyfile is written with pickle; the data is
        # self-generated so this is not directly exploitable, but a
        # json+base64 format (used elsewhere in the codebase) would be safer
        # and interoperable — consider migrating, keeping readers in sync.
        with open(self.keypath, 'w') as kf:
            pickle.dump(saved_struct, kf)

        if not os.path.isfile(self.keypath):
            log.err("Unable to write keyfile %s" % self.keypath)
            raise Exception("Unable to write keyfile %s" % self.keypath)
Example #8
0
    def encrypt_message(self, plaintext):
        """
        Encrypt an in-memory text for the receiver configured in
        self.receiver_desc.

        @param plaintext: an arbitrarily long text to be encrypted with the
            key referenced by self.receiver_desc['gpg_key_fingerprint']
        @return: the armored (ASCII) encrypted output
        @raise errors.GPGKeyInvalid: if the key does not validate or the
            encryption engine reports failure
        """
        if not self.validate_key(self.receiver_desc['gpg_key_armor']):
            raise errors.GPGKeyInvalid

        # This second argument may be a list of fingerprint, not just one
        encrypt_obj = self.gpgh.encrypt(plaintext, str(self.receiver_desc['gpg_key_fingerprint']))

        if not encrypt_obj.ok:
            # typo fix: the original log message read "Falure"
            log.err("Failure in encrypting %d bytes for %s (%s)" % (len(plaintext),
                    self.receiver_desc['username'], self.receiver_desc['gpg_key_fingerprint']))
            log.err(encrypt_obj.stderr)
            raise errors.GPGKeyInvalid

        log.debug("Encrypting for %s (%s) %d byte of plain data (%d cipher output)" %
                  (self.receiver_desc['username'], self.receiver_desc['gpg_key_fingerprint'],
                   len(plaintext), len(str(encrypt_obj))))

        return str(encrypt_obj)
    def process_mail_creation(self, store, data):
        """
        Build and enqueue a notification Mail for a single receiver/tip
        event, honoring opt-outs, the hourly threshold and the receiver's
        PGP settings.

        data is expected to carry 'receiver', 'tip', 'type' and the
        receiver's language/pgp fields — TODO confirm full schema at caller.
        """
        receiver_id = data['receiver']['id']

        # Do not spool emails if the receiver has opted out of ntfns for this tip.
        if not data['tip']['enable_notifications']:
          log.debug("Discarding emails for %s due to receiver's preference." % receiver_id)
          return

        # https://github.com/globaleaks/GlobaLeaks/issues/798
        # TODO: the current solution is global and configurable only by the admin
        sent_emails = GLSettings.get_mail_counter(receiver_id)
        if sent_emails >= GLSettings.memory_copy.notification_threshold_per_hour:
            log.debug("Discarding emails for receiver %s due to threshold already exceeded for the current hour" %
                      receiver_id)
            return

        GLSettings.increment_mail_counter(receiver_id)
        # NOTE(review): 'sent_emails' is not re-read after the increment and
        # the early return above already guarantees it is below the threshold
        # here, so this branch looks unreachable; it may have been intended as
        # 'sent_emails + 1 >= threshold' — confirm against upstream semantics.
        if sent_emails >= GLSettings.memory_copy.notification_threshold_per_hour:
            log.info("Reached threshold of %d emails with limit of %d for receiver %s" % (
                     sent_emails,
                     GLSettings.memory_copy.notification_threshold_per_hour,
                     receiver_id)
            )

            # simply changing the type of the notification causes
            # to send the notification_limit_reached
            data['type'] = u'receiver_notification_limit_reached'

        data['notification'] = db_get_notification(store, data['receiver']['language'])
        data['node'] = db_admin_serialize_node(store, data['receiver']['language'])

        # node forbids plaintext and this receiver has no usable PGP key:
        # the mail is silently dropped
        if not data['node']['allow_unencrypted'] and data['receiver']['pgp_key_status'] != u'enabled':
            return

        subject, body = Templating().get_mail_subject_and_body(data)

        # If the receiver has encryption enabled encrypt the mail body
        if data['receiver']['pgp_key_status'] == u'enabled':
            gpob = GLBPGP()

            try:
                gpob.load_key(data['receiver']['pgp_key_public'])
                body = gpob.encrypt_message(data['receiver']['pgp_key_fingerprint'], body)
            except Exception as excep:
                log.err("Error in PGP interface object (for %s: %s)! (notification+encryption)" %
                        (data['receiver']['username'], str(excep)))

                # encryption failed: the mail is not queued
                return
            finally:
                # the finally statement is always called also if
                # except contains a return or a raise
                gpob.destroy_environment()

        mail = models.Mail({
            'address': data['receiver']['mail_address'],
            'subject': subject,
            'body': body
        })

        store.add(mail)
Example #10
0
    def operation(self):
        """
        Hourly bookkeeping: persist collected anomalies and statistics, then
        decay the per-receiver mail counters by the hourly threshold.
        """
        # ------- BEGIN Anomalies section -------
        yield save_anomalies(get_anomalies())
        # ------- END Anomalies section ---------

        # ------- BEGIN Stats section -----------
        now = datetime_now()
        summary = get_statistics()
        yield save_statistics(self.collection_start_time, now, summary)
        # ------- END Stats section -------------

        # ------- BEGIN Mail thresholds management -----------
        GLSettings.exceptions = {}
        GLSettings.exceptions_email_count = 0

        hourly_limit = GLSettings.memory_copy.notification_threshold_per_hour
        for rcvr_id, counted in GLSettings.mail_counters.iteritems():
            remaining = counted - hourly_limit
            GLSettings.mail_counters[rcvr_id] = remaining if remaining > 0 else 0
        # ------- END Mail thresholds management -----------

        self.reset()

        log.debug("Saved stats and time updated, keys saved %d" % len(summary.keys()))
Example #11
0
        def call_handler(cls, *args, **kwargs):
            """
            Authentication gate around the wrapped handler:
            If not yet auth, is redirected.
            If is logged with the right account, is accepted.
            If is logged with the wrong account, is rejected with a special message.
            """
            if not cls.current_user:
                raise errors.NotAuthenticated

            # keep a copy of the role: after update_session() the session
            # (and therefore current_user) may no longer exist
            copy_role = cls.current_user.role

            if not update_session(cls.current_user):
                expired_by_role = {
                    'admin': errors.AdminSessionExpired,
                    'wb': errors.WBSessionExpired,
                    'receiver': errors.ReceiverSessionExpired,
                }
                if copy_role not in expired_by_role:
                    raise AssertionError("Developer mistake: %s" % cls.current_user)
                raise expired_by_role[copy_role]()

            if role in ('*', cls.current_user.role):
                log.debug("Authentication OK (%s)" % cls.current_user.role)
                return method_handler(cls, *args, **kwargs)

            # else, the session role does not match the required one
            error = "Good login in wrong scope: you %s, expected %s" % \
                    (cls.current_user.role, role)

            log.err(error)

            raise errors.InvalidScopeAuth(error)
    def operation(self):
        """
        This scheduler is responsible of:
            - Removal of expired sessions
            - Reset of failed login attempts counters
        """
        # Removal of expired sessions
        try:
            # collect first, delete afterwards: removing entries while
            # looping over the dict would break the iteration
            expired_sids = [
                sid for sid, session in GLSetting.sessions.items()
                if is_expired(session.refreshdate,
                              seconds=GLSetting.defaults.lifetimes[session.role])
            ]

            for sid in expired_sids:
                del GLSetting.sessions[sid]

            if expired_sids:
                log.debug("Expired %d sessions" % len(expired_sids))

        except Exception as excep:
            log.err("Exception failure in session cleaning routine (%s)" % excep.message)
            sys.excepthook(*sys.exc_info())

        # Reset of failed login attempts counters
        GLSetting.failed_login_attempts = 0
Example #13
0
        def call_handler(cls, *args, **kwargs):
            """
            GLSetting contains the copy of the latest admin configuration; this
            enhances performance instead of searching in the DB at every handler
            connection.

            Rejects Tor2Web connections when the role-specific flag disallows
            them, logging the accepted ones.
            """
            assert wrapped_handler_role in tor2web_roles

            # role -> "accept Tor2Web" flag; any unknown role falls back to
            # the unauthenticated policy (same semantics as the former
            # if/elif chain)
            tor2web_flags = {
                'tip': GLSetting.memory_copy.tor2web_tip,
                'submission': GLSetting.memory_copy.tor2web_submission,
                'receiver': GLSetting.memory_copy.tor2web_receiver,
                'admin': GLSetting.memory_copy.tor2web_admin,
            }
            accept_tor2web = tor2web_flags.get(wrapped_handler_role,
                                               GLSetting.memory_copy.tor2web_unauth)

            are_we_tor2web = get_tor2web_header(cls.request.headers)

            # idiom fix: 'not accept_tor2web' instead of '== False'
            if are_we_tor2web and not accept_tor2web:
                log.err("Receiver connection on Tor2Web for role %s: forbidden in %s" %
                    (wrapped_handler_role, cls.request.uri))
                raise errors.TorNetworkRequired

            if are_we_tor2web:
                log.debug("Accepted Tor2Web connection for role '%s' in uri %s" %
                    (wrapped_handler_role, cls.request.uri))

            return method_handler(cls, *args, **kwargs)
Example #14
0
def update_session(user):
    """
    Returns True if the session is still valid, False instead.
    Timed out sessions are destroyed.
    """
    session_info = GLSetting.sessions[user.id]
    lifetime = GLSetting.defaults.lifetimes[user.role]

    if utility.is_expired(session_info.refreshdate, seconds=lifetime):
        log.debug("Authentication Expired (%s) %s seconds" % (
                  user.role, lifetime))

        del GLSetting.sessions[user.id]
        return False

    # still valid: update the access time to the latest
    session_info.refreshdate = utility.datetime_now()
    session_info.expirydate = utility.get_future_epoch(seconds=lifetime)

    return True
Example #15
0
    def create_key(self):
        """
        Create the AES key used to encrypt an uploaded file and persist it,
        together with its counter nonce, on the ramdisk as JSON.

        The key id / keypath pair is regenerated until it does not collide
        with an existing keyfile.
        """
        self.key = os.urandom(GLSettings.AES_key_size)

        # pick a random key id whose keyfile does not exist yet
        # (deduplicated: the original repeated the generation code twice)
        while True:
            self.key_id = generateRandomKey(16)
            self.keypath = os.path.join(GLSettings.ramdisk_path, "%s%s" %
                                        (GLSettings.AES_keyfile_prefix, self.key_id))
            if not os.path.isfile(self.keypath):
                break

        self.key_counter_nonce = os.urandom(GLSettings.AES_counter_nonce)
        self.initialize_cipher()

        # base64 so the raw key bytes survive JSON serialization
        key_json = {
            'key': base64.b64encode(self.key),
            'key_counter_nonce': base64.b64encode(self.key_counter_nonce)
        }

        log.debug("Key initialization at %s" % self.keypath)

        with open(self.keypath, 'w') as kf:
            json.dump(key_json, kf)

        if not os.path.isfile(self.keypath):
            log.err("Unable to write keyfile %s" % self.keypath)
            raise Exception("Unable to write keyfile %s" % self.keypath)
Example #16
0
def receiverfile_planning(store):
    """
    This function roll over the InternalFile uploaded, extract a path, id and
    receivers associated, one entry for each combination. representing the
    ReceiverFile that need to be created.

    Returns a dict keyed by InternalFile id, each entry carrying the ifile
    metadata plus the list of ReceiverFile descriptors under 'rfiles'.
    """
    receiverfiles_maps = {}

    # only 'new' InternalFiles (not yet processed) are considered
    for ifile in store.find(InternalFile, InternalFile.new == True):
        # too many failed attempts: skip the file permanently
        if ifile.processing_attempts >= INTERNALFILES_HANDLE_RETRY_MAX:
            ifile.new = False
            error = "Failed to handle receiverfiles creation for ifile %s (%d retries)" % \
                    (ifile.id, INTERNALFILES_HANDLE_RETRY_MAX)
            log.err(error)
            continue

        elif ifile.processing_attempts >= 1:
            log.err("Failed to handle receiverfiles creation for ifile %s (retry %d/%d)" %
                    (ifile.id, ifile.processing_attempts, INTERNALFILES_HANDLE_RETRY_MAX))


        if ifile.processing_attempts:
            log.debug("Starting handling receiverfiles creation for ifile %s retry %d/%d" %
                  (ifile.id, ifile.processing_attempts, INTERNALFILES_HANDLE_RETRY_MAX))

        ifile.processing_attempts += 1

        # one ReceiverFile per (InternalFile, ReceiverTip) combination
        for rtip in ifile.internaltip.receivertips:
            receiverfile = ReceiverFile()
            receiverfile.internalfile_id = ifile.id
            receiverfile.receivertip_id = rtip.id
            receiverfile.file_path = ifile.file_path
            receiverfile.size = ifile.size
            receiverfile.status = u'processing'

            # https://github.com/globaleaks/GlobaLeaks/issues/444
            # avoid to mark the receiverfile as new if it is part of a submission
            # this way we avoid to send unuseful messages
            receiverfile.new = False if ifile.submission else True

            store.add(receiverfile)

            # lazily create the per-ifile entry on first receiver seen
            if ifile.id not in receiverfiles_maps:
                receiverfiles_maps[ifile.id] = {
                  'plaintext_file_needed': False,
                  'ifile_id': ifile.id,
                  'ifile_path': ifile.file_path,
                  'ifile_size': ifile.size,
                  'rfiles': []
                }

            receiverfiles_maps[ifile.id]['rfiles'].append({
                'id': receiverfile.id,
                'status': u'processing',
                'path': ifile.file_path,
                'size': ifile.size,
                'receiver': admin_serialize_receiver(rtip.receiver, GLSettings.memory_copy.default_language)
            })

    return receiverfiles_maps
Example #17
0
def db_create_receiver(store, request, language):
    """
    Creates a new receiver.

    Returns:
        the created models.Receiver instance
    """
    user = db_create_user(store, request, language)

    fill_localized_keys(request, models.Receiver.localized_keys, language)

    receiver = models.Receiver(request)

    # a Receiver shares the id of its User row
    receiver.id = user.id

    store.add(receiver)

    for context_id in request.get('contexts', []):
        context = models.Context.get(store, context_id)
        if not context:
            raise errors.ContextIdNotFound
        context.receivers.add(receiver)

    log.debug("Created new receiver")

    return receiver
Example #18
0
def parse_pgp_key(key):
    """
    Used for parsing a PGP key

    @param key: the armored PGP public key block
    @return: dict with the original key, its fingerprint and expiration
    """
    gnob = GLBPGP()

    # try/finally is sufficient here: the original 'except: raise' clause was
    # a no-op re-raise. The finally block runs on both return and raise,
    # guaranteeing the temporary PGP environment is destroyed.
    try:
        k = gnob.load_key(key)

        log.debug("Parsed the PGP Key: %s" % k['fingerprint'])

        return {
            'public': key,
            'fingerprint': k['fingerprint'],
            'expiration': k['expiration']
        }
    finally:
        gnob.destroy_environment()
Example #19
0
    def get_token_difficulty(self):
        """
        NOTE: this function is not called yet.

        Compute the difficulty that will be enforced in the token, whether it
        is File or Submission, evaluated as a dict of boolean flags.
        """
        # the flags derive directly from the current stress levels
        self.difficulty_dict = {
            'human_captcha': Alarm.stress_levels['disk_space'] >= 1,
            'graph_captcha': Alarm.stress_levels['activity'] >= 1,
            'proof_of_work': False,
        }

        log.debug("get_token_difficulty in %s is: HC:%s, GC:%s, PoW:%s" % (
                  self.current_time,
                  "Y" if self.difficulty_dict['human_captcha'] else "N",
                  "Y" if self.difficulty_dict['graph_captcha'] else "N",
                  "Y" if self.difficulty_dict['proof_of_work'] else "N"))

        return self.difficulty_dict
Example #20
0
    def operation(self):
        """
        executed every 60 minutes
        """
        # flush the recently collected anomalies, then reset the queue
        for when, anomaly_blob in dict(StatisticsSchedule.RecentAnomaliesQ).iteritems():
            yield save_anomalies(when, anomaly_blob[0], anomaly_blob[1])

        StatisticsSchedule.RecentAnomaliesQ = dict()

        # Address the statistics: the start/end times are strings without the
        # last 8 bytes, to let d3.js parse them easily (or investigate);
        # creation_date, default model, is ignored in the visualisation
        current_time = datetime_now()

        #  {  'id' : expired_event.event_id
        #     'when' : datetime_to_ISO8601(expired_event.creation_date)[:-8],
        #     'event' : expired_event.event_type, 'duration' :   }
        statistic_summary = {}
        for descblob in StatisticsSchedule.RecentEventQ:
            if 'event' not in descblob:
                continue
            statistic_summary[descblob['event']] = \
                statistic_summary.get(descblob['event'], 0) + 1

        yield save_statistics(StatisticsSchedule.collection_start_datetime,
                              current_time, statistic_summary)

        StatisticsSchedule.reset()
        StatisticsSchedule.collection_start_datetime = current_time

        log.debug("Saved stats and time updated, keys saved %d" %
                  len(statistic_summary.keys()))
Example #21
0
    def process_event(self, store, comment):
        """
        Build notification Events for a new comment, one per receiver of the
        related InternalTip, skipping the receiver who authored it.
        """
        comment_desc = rtip.receiver_serialize_comment(comment)

        context_desc = admin.admin_serialize_context(store,
                                                     comment.internaltip.context,
                                                     self.language)

        # for every comment, iterate on the associated receiver(s)
        log.debug("Comments from %s - Receiver(s) %d" % \
                  (comment.author, comment.internaltip.receivers.count()))

        for receiver in comment.internaltip.receivers:
            if comment.type == u'receiver' and comment.author == receiver.name:
                log.debug("Receiver is the Author (%s): skipped" % receiver.user.username)
                # NOTE(review): this 'return' stops processing for ALL
                # remaining receivers, not just the author; 'continue' may
                # have been intended — confirm against upstream behavior.
                return

            receivertip = store.find(models.ReceiverTip,
                                     (models.ReceiverTip.internaltip_id == comment.internaltip_id,
                                      models.ReceiverTip.receiver_id == receiver.id)).one()

            tip_desc = serialize_receivertip(store, receivertip, self.language)

            do_mail, receiver_desc = self.import_receiver(receiver)

            self.events.append(Event(type=self.template_type,
                                     trigger=self.trigger,
                                     node_info={},
                                     receiver_info=receiver_desc,
                                     context_info=context_desc,
                                     tip_info=tip_desc,
                                     subevent_info=comment_desc,
                                     do_mail=do_mail))
Example #22
0
def update_notification(store, request, language=GLSetting.memory_copy.default_language):
    """
    Update the singleton Notification settings from an admin request and
    return their serialized form.

    NOTE: the 'language' default is bound once, at function definition time.
    """
    try:
        notif = store.find(Notification).one()
    except Exception as excep:
        log.err("Database error or application error: %s" % excep)
        raise excep

    fill_localized_keys(request, Notification.localized_strings, language)

    # guard clause: reject unknown security options before touching the model
    if request['security'] not in Notification._security_types:
        log.err("Invalid request: Security option not recognized")
        log.debug("Invalid Security value: %s" % request['security'])
        raise errors.InvalidInputFormat("Security selection not recognized")

    notif.security = request['security']

    try:
        notif.update(request)
    except DatabaseError as dberror:
        log.err("Unable to update Notification: %s" % dberror)
        raise errors.InvalidInputFormat(dberror)

    if request['disable'] != GLSetting.notification_temporary_disable:
        log.msg("Switching notification mode: was %s and now is %s" %
                ("DISABLE" if GLSetting.notification_temporary_disable else "ENABLE",
                 "DISABLE" if request['disable'] else "ENABLE")
        )
        GLSetting.notification_temporary_disable = request['disable']

    return admin_serialize_notification(notif, language)
Example #23
0
 def every_notification_succeeded(self, store, result, event_id):
     """
     Mark the EventLogs entry as mailed when the delivery was tied to an
     event; digest/anomaly mails carry no event id to update.
     """
     if not event_id:
         log.debug("Mail (Digest|Anomaly) correctly sent")
         return

     log.debug("Mail delivered correctly for event %s, [%s]" % (event_id, result))
     evnt = store.find(EventLogs, EventLogs.id == event_id).one()
     evnt.mail_sent = True
Example #24
0
def shortlocal_v(_self, attr, value):
    """
    Validate a dict containing a localized content (a dictionary having as a key
        one of the supported languages)
    """
    dict_v(None, attr, value)

    if not value:
        return value

    # An unknown language is not necessarily malicious input: it may happen
    # when a language is removed from the package, so the correct handling is
    # simply to log the issue and discard that entry.
    # https://github.com/globaleaks/GlobaLeaks/issues/879
    unknown_codes = [code for code in value if code not in LANGUAGES_SUPPORTED_CODES]
    for code in unknown_codes:
        try:
            del value[unicode(code)]
        except KeyError:
            pass
        log.debug("shortlocal validation: (%s) Invalid language code in %s, skipped" % (code, attr))

    for lang, text in value.iteritems():
        shorttext_v(None, None, text)

    return value
Example #25
0
def receiverfile_create(store, if_path, recv_path, status, recv_size, receiver_desc):
    """
    Create a ReceiverFile row linking the InternalFile at if_path to the
    receiver described by receiver_desc.

    Failures are logged and swallowed (best-effort creation).
    """
    try:
        ifile = store.find(InternalFile, InternalFile.file_path == unicode(if_path)).one()

        if ifile is None:
            log.err("InternalFile with path %s not found !?" % if_path)
            raise Exception("This is bad!")

        log.debug("ReceiverFile creation for user %s, '%s' bytes %d = %s)"
                % (receiver_desc['name'], ifile.name, recv_size, status))

        rtip_match = store.find(ReceiverTip,
                                ReceiverTip.internaltip_id == ifile.internaltip_id,
                                ReceiverTip.receiver_id == receiver_desc['id']).one()

        receiverfile = ReceiverFile()
        receiverfile.receiver_id = receiver_desc['id']
        receiverfile.internaltip_id = ifile.internaltip_id
        receiverfile.internalfile_id = ifile.id
        receiverfile.receivertip_id = rtip_match.id
        receiverfile.file_path = unicode(recv_path)
        receiverfile.size = recv_size
        receiverfile.status = unicode(status)

        store.add(receiverfile)

    except Exception as excep:
        # deliberate best-effort: the failure is logged, not propagated
        log.err("Error when saving ReceiverFile %s for '%s': %s" %
                (if_path, receiver_desc['name'], excep.message))
Example #26
0
def postpone_expiration_date(store, user_id, rtip_id):
    """
    Postpone the expiration date of the InternalTip behind the given
    ReceiverTip, when either the node or the receiver allows it.
    """
    rtip = db_access_rtip(store, user_id, rtip_id)

    node_allows = GLSettings.memory_copy.can_postpone_expiration
    receiver_allows = rtip.receiver.can_postpone_expiration

    if not node_allows and not receiver_allows:
        raise errors.ExtendTipLifeNotEnabled

    log.debug(
        "Postpone check: Node %s, Receiver %s"
        % ("True" if node_allows else "False",
           "True" if receiver_allows else "False")
    )

    db_postpone_expiration_date(rtip)

    log.debug(
        " [%s] in %s has postponed expiration time to %s"
        % (rtip.receiver.user.name,
           datetime_to_pretty_str(datetime_now()),
           datetime_to_pretty_str(rtip.internaltip.expiration_date))
    )
Example #27
0
def get_messages_content(store, wb_tip_id, receiver_id):
    """
    Get the messages content and mark all the unread
    messages as "read"
    """
    wb_tip = store.find(WhistleblowerTip,
                        WhistleblowerTip.id == unicode(wb_tip_id)).one()
    if wb_tip is None:
        raise errors.TipReceiptNotFound

    rtip = store.find(ReceiverTip,
                      ReceiverTip.internaltip_id == wb_tip.internaltip.id,
                      ReceiverTip.receiver_id == unicode(receiver_id)).one()
    if rtip is None:
        raise errors.TipMessagesNotFound

    serialized = []
    for msg in store.find(Message, Message.receivertip_id == rtip.id):
        serialized.append(wb_serialize_message(msg))

        # flag receiver messages as seen the first time they are fetched
        if msg.type == u'receiver' and not msg.visualized:
            log.debug("Marking as readed message [%s] from %s" % (msg.content, msg.author))
            msg.visualized = True

    return serialized
Example #28
0
def import_files(store, submission, files, finalize):
    """
    Attach a list of InternalFile(s) to a submission.

    @param submission: the Storm submission object
    @param files: the list of InternalFile UUIDs to attach
    @param finalize: when True, enforce the context's file requirement
    @raise FileGusNotFound: when a listed file does not exist
    @raise FileRequiredMissing: when the context requires at least one
        file and none was provided at finalization
    """
    for file_id in files:
        try:
            ifile = store.find(InternalFile, InternalFile.id == unicode(file_id)).one()
        except Exception as excep:
            log.err("Storm error, not found %s file in import_files (%s)" %
                    (file_id, excep))
            raise errors.FileGusNotFound

        # Storm's .one() returns None (it does not raise) when no row
        # matches; without this check the assignment below would crash
        # with AttributeError instead of reporting the missing file.
        if ifile is None:
            log.err("InternalFile %s not present in import_files" % file_id)
            raise errors.FileGusNotFound

        ifile.internaltip_id = submission.id

    if finalize and submission.context.file_required and not len(files):
        log.debug("Missing file for a submission in context %s" %
                  submission.context.name)
        raise errors.FileRequiredMissing

    # commit before return
    store.commit()
Example #29
0
def register_file_db(store, uploaded_file, internaltip_id):
    """
    Record an uploaded file as an InternalFile bound to an existing
    InternalTip. Used only by fileApp when a Tip appends a new file;
    submission uploads are tracked through the Token and their
    InternalFile(s) are created in handlers/submission.py.

    :param uploaded_file: dict with keys 'body', 'body_len',
        'content_type', 'encrypted_path' and 'filename'
    :raise TipIdNotFound: when internaltip_id matches no InternalTip
    :return: the serialized new InternalFile
    """
    internaltip = store.find(InternalTip,
                             InternalTip.id == internaltip_id).one()

    if not internaltip:
        log.err("File associated to a non existent Internaltip!")
        raise errors.TipIdNotFound

    ifile = InternalFile()
    ifile.name = uploaded_file['filename']
    ifile.content_type = uploaded_file['content_type']
    ifile.size = uploaded_file['body_len']
    ifile.internaltip_id = internaltip_id
    ifile.file_path = uploaded_file['encrypted_path']

    store.add(ifile)

    log.debug("=> Recorded new InternalFile %s" % uploaded_file['filename'])

    return serialize_file(ifile)
Example #30
0
def do_final_internalfile_update(store, file_path, new_marker, new_path=None):
    """
    Switch the marker (and optionally the filesystem path) of the
    InternalFile identified by file_path. Best-effort: every failure
    is logged and swallowed.
    """
    try:
        ifile = store.find(InternalFile,
                           InternalFile.file_path == unicode(file_path)).one()
    except Exception as stormer:
        log.err("Error in find %s: %s" % (file_path, stormer.message))
        return

    if ifile is None:
        log.err("Unable to find %s" % file_path)
        return

    try:
        previous_marker = ifile.mark
        ifile.mark = new_marker

        if new_path:
            ifile.file_path = new_path

        log.debug("Switched status set for InternalFile %s (%s => %s)" %(
            ifile.name, previous_marker, new_marker
        ))
    except Exception as excep:
        log.err("Unable to switch mode in InternalFile %s: %s" % (ifile.name, excep) )
        if new_path:
            log.err("+ filename switch fail: %s => %s" % (ifile.file_path, new_path))
Example #31
0
    def operation(self):
        """
        Periodic cleaning routine with three goals:

        1. delete unfinalized submissions whose creation date exceeds
           the configured submission lifetime;
        2. delete expired Tips, together with their folders, files and
           comments;
        3. reset the exception counter that acts as limit for mail storm.

        Any failure is logged, mailed via the global excepthook and
        otherwise swallowed, so the scheduler keeps running.
        """
        try:
            # First Goal: expire unfinalized submissions
            submissions = yield get_tiptime_by_marker(u'submission')
            log.debug(
                "(Cleaning routines) %d unfinished Submission are check if expired"
                % len(submissions))
            for submission in submissions:
                # unfinalized submissions expire after a fixed node-wide lifetime
                if is_expired(ISO8601_to_datetime(submission['creation_date']),
                              GLSetting.defaults.submission_seconds_of_life):
                    log.info(
                        "Deleting an unfinalized Submission (creation %s expiration %s) files %d"
                        % (submission['creation_date'],
                           submission['expiration_date'], submission['files']))
                    yield itip_cleaning(submission['id'])

            # Second Goal: expire finalized tips past their expiration date
            tips = yield get_tiptime_by_marker(u'first')
            log.debug(
                "(Cleaning routines) %d Tips stored are check if expired" %
                len(tips))
            for tip in tips:
                if is_expired(ISO8601_to_datetime(tip['expiration_date'])):
                    log.info(
                        "Deleting an expired Tip (creation date: %s, expiration %s) files %d comments %d"
                        % (tip['creation_date'], tip['expiration_date'],
                           tip['files'], tip['comments']))
                    yield itip_cleaning(tip['id'])

            # Third Goal: Reset of GLSetting.exceptions
            GLSetting.exceptions = {}

        except Exception as excep:
            log.err(
                "Exception failure in submission/tip cleaning routine (%s)" %
                excep.message)
            sys.excepthook(*sys.exc_info())
Example #32
0
    def create_file(store, cls, raw_key):
        """
        Validate raw_key with cls' validator and, when it passes, store
        it as the HTTPS private key, flagging it as externally supplied
        (https_priv_gen = False). Returns the validation outcome.
        """
        db_cfg = load_tls_dict(store)
        db_cfg['ssl_key'] = raw_key

        prv_fact = PrivateFactory(store)
        ok, _ = cls.validator().validate(db_cfg)
        if not ok:
            log.debug('Key validation failed')
        else:
            prv_fact.set_val(u'https_priv_key', raw_key)
            prv_fact.set_val(u'https_priv_gen', False)

        return ok
Example #33
0
    def cleaning_dead_files(self):
        """
        Startup cleanup, called at the start of GlobaLeaks in
        bin/globaleaks: purge leftover temporary files and orphaned
        encrypted attachments whose AES key file is gone.
        """
        # temporary .aes files must be simply deleted
        for f in os.listdir(self.settings.tmp_path):
            path = os.path.join(self.settings.tmp_path, f)
            log.debug("Removing old temporary file: %s", path)

            try:
                os.remove(path)
            except OSError as excep:
                # best-effort: log and continue with the next file
                log.debug("Error while evaluating removal for %s: %s", path,
                          excep.strerror)

        # temporary .aes files with lost keys can be deleted
        # while temporary .aes files with valid current key
        # will be automagically handled by delivery sched.
        keypath = os.path.join(self.settings.tmp_path,
                               self.settings.AES_keyfile_prefix)

        for f in os.listdir(self.settings.attachments_path):
            path = os.path.join(self.settings.attachments_path, f)
            try:
                # group(1) of the AES filename regexp is the key id;
                # the attachment is recoverable only while the matching
                # "<AES_keyfile_prefix><key id>" file still exists
                result = self.settings.AES_file_regexp_comp.match(f)
                if result is not None:
                    if not os.path.isfile("%s%s" % (keypath, result.group(1))):
                        log.debug("Removing old encrypted file (lost key): %s",
                                  path)
                        os.remove(path)
            except Exception as excep:
                log.debug("Error while evaluating removal for %s: %s", path,
                          excep)
Example #34
0
    def __init__(self, filepath):
        """
        Reopen an existing secure (AES encrypted) file for read/write.

        The key id is derived from the file name (the part before the
        first dot) and the matching key material is loaded at the end.
        """
        self.filepath = filepath

        # "<key_id>.<ext>" naming convention: the basename prefix is the key id
        self.key_id = os.path.basename(self.filepath).split('.')[0]

        log.debug("Opening secure file %s with %s" %
                  (self.filepath, self.key_id))

        self.file = open(self.filepath, 'r+b')

        # last argument is 'False' because the file has not to be deleted on .close()
        _TemporaryFileWrapper.__init__(self, self.file, self.filepath, False)

        self.load_key()
Example #35
0
def sync_clean_untracked_files(session):
    """
    Remove files in Settings.attachments_path that are not tracked by
    InternalFile/ReceiverFile. Removal failures are logged and skipped.
    """
    tracked_files = db_get_tracked_files(session)
    for filesystem_file in os.listdir(Settings.attachments_path):
        if filesystem_file not in tracked_files:
            file_to_remove = os.path.join(Settings.attachments_path, filesystem_file)
            try:
                log.debug('Removing untracked file: %s', file_to_remove)
                security.overwrite_and_remove(file_to_remove)
            except OSError:
                # bug fix: the original call passed file_to_remove without a
                # '%s' placeholder, so the path was never interpolated
                log.err('Failed to remove untracked file: %s', file_to_remove)
Example #36
0
    def get(cls, resource_name, language, function, *args, **kwargs):
        """
        Return the cached value for (resource_name, language); on a
        miss, compute it via function(*args, **kwargs) and memoize it.

        NOTE(review): on KeyError (probable concurrent mutation of the
        cache dict) the call resolves to None rather than retrying.
        """
        try:
            if resource_name in cls.memory_cache_dict \
                    and language in cls.memory_cache_dict[resource_name]:
                returnValue(cls.memory_cache_dict[resource_name][language])

            # cache miss: function may return a Deferred, hence the yield
            value = yield function(*args, **kwargs)
            if resource_name not in cls.memory_cache_dict:
                cls.memory_cache_dict[resource_name] = {}
            cls.memory_cache_dict[resource_name][language] = value
            returnValue(value)
        except KeyError:
            log.debug("KeyError exception while operating on the cache; probable race")
            returnValue(None)
Example #37
0
def fill_context_request(request, language):
    """
    Normalize an incoming context request in place: localize the
    translatable keys, clamp negative tip_timetolive to -1, and zero
    maximum_selectable_receivers when all receivers are auto-selected.
    Returns the same request dict.
    """
    fill_localized_keys(request, models.Context.localized_keys, language)

    if request['tip_timetolive'] < 0:
        request['tip_timetolive'] = -1

    if request['select_all_receivers']:
        if request['maximum_selectable_receivers']:
            log.debug(
                "Resetting maximum_selectable_receivers (%d) because 'select_all_receivers' is True"
                % request['maximum_selectable_receivers'])
        request['maximum_selectable_receivers'] = 0

    return request
Example #38
0
def sync_clean_untracked_files(store):
    """
    Remove files in GLSettings.submission_path that are not tracked by
    InternalFile/ReceiverFile. Removal failures are logged and skipped.
    """
    tracked_files = db_get_tracked_files(store)
    for filesystem_file in os.listdir(GLSettings.submission_path):
        if filesystem_file not in tracked_files:
            file_to_remove = os.path.join(GLSettings.submission_path, filesystem_file)
            try:
                log.debug("Removing untracked file: %s", file_to_remove)
                security.overwrite_and_remove(file_to_remove)
            except OSError:
                # bug fix: the original call passed file_to_remove without a
                # '%s' placeholder, so the path was never interpolated
                log.err("Failed to remove untracked file: %s", file_to_remove)
Example #39
0
        def call_handler(cls, *args, **kwargs):
            """
            Authentication gate for the wrapped handler:
            - not authenticated: rejected;
            - authenticated with the required role (or role '*'): accepted;
            - authenticated with any other role: rejected with a
              dedicated error.
            """
            if not cls.current_user:
                raise errors.NotAuthenticated

            if role != '*' and role != cls.current_user.user_role:
                raise errors.InvalidAuthentication

            log.debug("Authentication OK (%s)" %
                      cls.current_user.user_role)
            return method_handler(cls, *args, **kwargs)
Example #40
0
    def operation(self):
        """
        Fetch the repository Packages index, determine the newest
        published 'globaleaks' version and hand it to the update
        notification evaluation.
        """
        log.debug('Fetching latest GlobaLeaks version from repository')
        packages_file = yield self.fetch_packages_file()
        # collect every published version of the 'globaleaks' package
        versions = [
            p['Version'] for p in deb822.Deb822.iter_paragraphs(packages_file)
            if p['Package'] == 'globaleaks'
        ]
        # sort with the V key — presumably Debian version ordering,
        # not plain lexicographic sort; V is defined elsewhere
        versions.sort(key=V)

        latest_version = unicode(versions[-1])

        yield evaluate_update_notification(latest_version)

        log.debug('The newest version in the repository is: %s',
                  latest_version)
Example #41
0
def db_increment_receiver_access_count(store, user_id, tip_id):
    """
    Record one more access of a receiver to a tip, updating the last
    access timestamp; raise AccessLimitExceeded once the internaltip's
    access limit is passed. Returns the updated counter.
    """
    rtip = access_tip(store, user_id, tip_id)

    rtip.last_access = datetime_now()
    rtip.access_counter += 1

    if rtip.access_counter > rtip.internaltip.access_limit:
        raise errors.AccessLimitExceeded

    log.debug(
        "Tip %s access garanted to user %s access_counter %d on limit %d" %
        (rtip.id, rtip.receiver.name, rtip.access_counter,
         rtip.internaltip.access_limit))

    return rtip.access_counter
Example #42
0
def save_anomalies(store, anomalies_list):
    """
    Persist every (date, description, alarm) anomaly tuple as an
    Anomalies row and log how many were stored.
    """
    saved = 0
    for anomaly_date, anomaly_desc, alarm_raised in anomalies_list:
        entry = Anomalies()
        entry.alarm = alarm_raised
        entry.date = anomaly_date
        entry.events = anomaly_desc
        store.add(entry)
        saved += 1

    if saved:
        log.debug("save_anomalies: Saved %d anomalies collected during the last hour" %
                  saved)
Example #43
0
    def download_rfile(self, store, user_id, file_id):
        """
        Locate the receiver file belonging to user_id, account for the
        download and return its serialized form.

        Raises FileIdNotFound when no matching file exists.
        """
        rfile = store.find(ReceiverFile, ReceiverFile.id == file_id,
                           ReceiverFile.receivertip_id == ReceiverTip.id,
                           ReceiverTip.receiver_id == user_id).one()

        if rfile is None:
            raise errors.FileIdNotFound

        log.debug("Download of file %s by receiver %s (%d)" %
                  (rfile.internalfile_id, rfile.receivertip.receiver_id,
                   rfile.downloads))

        rfile.downloads += 1

        return serializers.serialize_rfile(rfile)
Example #44
0
    def __init__(self, filedir):
        """
        Create a new secure temporary file inside *filedir*.

        A fresh AES key is generated first; the file is named after the
        key id and is removed from disk when .close() is invoked.

        filedir: directory where to store files
        """
        self.create_key()

        # XXX remind enhance file name with incremental number
        self.filepath = os.path.join(filedir, "%s.aes" % self.key_id)

        log.debug("++ Creating %s filetmp" % self.filepath)

        self.file = open(self.filepath, 'w+b')

        # last argument is 'True' because the file has to be deleted on .close()
        _TemporaryFileWrapper.__init__(self, self.file, self.filepath, True)
Example #45
0
    def get_token_difficulty(self):
        """
        Compute the challenge difficulty enforced on a token (File or
        Submission alike), evaluated over the difficulty dict.
        """
        # raise the human captcha requirement under activity stress
        if Alarm.stress_levels['activity'] >= 1:
            self.difficulty_dict['human_captcha'] = True

        flags = self.difficulty_dict
        log.debug(
            "get_token_difficulty in %s is: HC:%s, GC:%s, PoW:%s" %
            (self.current_time,
             "Y" if flags['human_captcha'] else "N",
             "Y" if flags['graph_captcha'] else "N",
             "Y" if flags['proof_of_work'] else "N"))

        return self.difficulty_dict
Example #46
0
def download_file(store, user_id, rtip_id, file_id):
    """
    Verify the user's access to the rtip, then account for the download
    of the requested receiver file and return its serialization.

    Raises FileIdNotFound when the file is missing or owned by another
    receiver.
    """
    db_access_rtip(store, user_id, rtip_id)

    rfile = store.find(ReceiverFile, ReceiverFile.id == unicode(file_id)).one()

    if rfile is None or rfile.receivertip.receiver_id != user_id:
        raise errors.FileIdNotFound

    log.debug("Download of file %s by receiver %s (%d)" %
              (rfile.internalfile_id, rfile.receivertip.receiver_id,
               rfile.downloads))

    rfile.downloads += 1

    return serialize_receiver_file(rfile)
Example #47
0
def create_receivertip(store, receiver, internaltip):
    """
    Build and persist the ReceiverTip binding *receiver* to
    *internaltip*; return the id of the new tip.
    """
    log.debug('Creating ReceiverTip for: %s' % receiver.name)

    rtip = ReceiverTip()
    rtip.internaltip_id = internaltip.id
    rtip.access_counter = 0
    rtip.receiver_id = receiver.id
    rtip.mark = u'not notified'

    store.add(rtip)

    return rtip.id
Example #48
0
    def sendError(self, exc):
        """
        Log SMTP delivery failures, enriching the numeric reply code
        with a human readable description from smtp_errors when one is
        known, then defer to the base SMTPClient error handling.
        """
        if exc.code and exc.resp:
            # the reply text usually begins with an enhanced status
            # code such as "5.7.1 ..."; extract it when present
            error = re.match(r'^([0-9\.]+) ', exc.resp)
            error_str = ""
            if error:
                error_str = error.group(1)
                # lookup key e.g. "550 5.7.1" in the known-errors table
                key = str(exc.code) + " " + error.group(1)
                if key in smtp_errors:
                    error_str += " " + smtp_errors[key]

            log.err("Failed to contact %s:%d (SMTP Error: %.3d %s)" %
                    (smtp_host, smtp_port, exc.code, error_str))
            # full server response only at debug verbosity
            log.debug("Failed to contact %s:%d (SMTP Error: %.3d %s)" %
                      (smtp_host, smtp_port, exc.code, exc.resp))
        SMTPClient.sendError(self, exc)
Example #49
0
def change_password(old_password_hash, old_password, new_password, salt):
    """
    Verify the old password against its stored hash and, on success,
    hash the new one.

    @param old_password_hash: the stored password hash.
    @param old_password: The user provided old password for password change protection.
    @param new_password: The user provided new password.
    @param salt: The salt to be used for password hashing.

    @return: the scrypt hash in base64 of the new password
    @raise InvalidOldPassword: when the old password does not verify.
    """
    if check_password(old_password, salt, old_password_hash):
        return hash_password(new_password, salt)

    log.debug("change_password(): Error - provided invalid old_password")
    raise errors.InvalidOldPassword
Example #50
0
def write_upload_encrypted_to_disk(uploaded_file, destination):
    """
    Relocate an uploaded (already encrypted) file to its final place.

    @param uploaded_file: uploaded_file data struct
    @param destination: the target path on disk
    @return: the same descriptor with 'path' updated to destination
    """
    log.debug("Moving encrypted bytes %d from file [%s] %s => %s",
              uploaded_file['size'], uploaded_file['name'],
              uploaded_file['path'], destination)

    shutil.move(uploaded_file['path'], destination)
    uploaded_file['path'] = destination

    return uploaded_file
Example #51
0
    def process_mail_creation(self, session, tid, data):
        """
        Build and enqueue (as a models.Mail row) the notification email
        described by *data* for one receiver, honoring the receiver's
        notification preference and the tenant's per-hour threshold;
        the body is PGP-encrypted when the receiver has a public key.
        """
        user_id = data['user']['id']

        # Do not spool emails if the receiver has opted out of ntfns for this tip.
        if not data['tip']['enable_notifications']:
            log.debug("Discarding emails for %s due to receiver's preference.", user_id)
            return

        # https://github.com/globaleaks/GlobaLeaks/issues/798
        # TODO: the current solution is global and configurable only by the admin
        sent_emails = self.state.get_mail_counter(user_id)
        if sent_emails >= self.state.tenant_cache[tid].notification.notification_threshold_per_hour:
            log.debug("Discarding emails for receiver %s due to threshold already exceeded for the current hour",
                      user_id)
            return

        self.state.increment_mail_counter(user_id)
        # re-check with the pre-increment value: the email that crosses
        # the threshold is still sent, but converted into a limit notice
        if sent_emails >= self.state.tenant_cache[tid].notification.notification_threshold_per_hour:
            log.info("Reached threshold of %d emails with limit of %d for receiver %s",
                     sent_emails,
                     self.state.tenant_cache[tid].notification.notification_threshold_per_hour,
                     user_id,
                     tid=tid)

            # simply changing the type of the notification causes
            # to send the notification_limit_reached
            data['type'] = u'receiver_notification_limit_reached'

        data['notification'] = self.serialize_config(session, 'notification', tid, data['user']['language'])
        data['node'] = self.serialize_config(session, 'node', tid, data['user']['language'])

        # plaintext mail is forbidden on this node and no PGP key: drop silently
        if not data['node']['allow_unencrypted'] and len(data['user']['pgp_key_public']) == 0:
            return

        subject, body = Templating().get_mail_subject_and_body(data)

        # If the receiver has encryption enabled encrypt the mail body
        if data['user']['pgp_key_public']:
            pgpctx = PGPContext(self.state.settings.tmp_path)
            fingerprint = pgpctx.load_key(data['user']['pgp_key_public'])['fingerprint']
            body = pgpctx.encrypt_message(fingerprint, body)

        session.add(models.Mail({
            'address': data['user']['mail_address'],
            'subject': subject,
            'body': body,
            'tid': tid,
        }))
Example #52
0
    def operation(self):
        """
        Mail flush routine: load pending notification events, filter
        out suppressed ones, deliver each through the configured
        notification plugin and finally send the aggregated "ping"
        notifications to the receivers that requested them.
        """
        if GLSettings.memory_copy.disable_receiver_notification_emails:
            log.debug("MailFlush: Receiver(s) Notification disabled by Admin")
            return

        queue_events = yield load_complete_events()

        if not len(queue_events):
            returnValue(None)

        # remove from this list the event that has not to be sent, for example,
        # the Files uploaded during the first submission, like issue #444 (zombie edition)
        filtered_events, to_be_suppressed = filter_notification_event(
            queue_events)

        if len(to_be_suppressed):
            # suppressed events are marked sent so they are not reloaded
            for eid in to_be_suppressed:
                yield mark_event_as_sent(eid)

        plugin = getattr(notification, GLSettings.notification_plugins[0])()
        # This wrap calls plugin/notification.MailNotification
        notifcb = NotificationMail(plugin)

        for qe_pos, qe in enumerate(filtered_events):
            yield notifcb.do_every_notification(qe)

            # pace the deliveries to avoid hammering the SMTP server
            if not self.skip_sleep:
                yield deferred_sleep(2)

        # This is the notification of the ping, if configured
        # aggregate events per receiver: {receiver_id: [receiver_info, count]}
        receivers_synthesis = {}
        for qe in filtered_events:
            if not qe.receiver_info['ping_notification']:
                continue

            if qe.receiver_info['id'] not in receivers_synthesis:
                receivers_synthesis[qe.receiver_info['id']] = [
                    qe.receiver_info, 1
                ]
            else:
                receivers_synthesis[qe.receiver_info['id']][1] += 1

        if len(receivers_synthesis.keys()):
            # I'm taking the element [0] of the list but every element has the same
            # notification settings; this value is passed to ping_mail_flush at
            # it is needed by Templating()
            yield self.ping_mail_flush(
                filtered_events[0].notification_settings, receivers_synthesis)
Example #53
0
    def post(self):
        """
        Receive a client-side exception report and forward it through
        the exception mail handler, unless the admin disabled client
        exception notifications.
        """
        request = self.validate_message(self.request.content.read(),
                                        requests.ExceptionDesc)

        if GLSettings.disable_client_exception_notification:
            return

        mail_body = "URL: %s\n\n" % request['errorUrl']
        mail_body += "User Agent: %s\n\n" % request['agent']
        mail_body += "Error Message: %s\n\n" % request['errorMessage']
        mail_body += "Stacktrace:\n"
        mail_body += json.dumps(request['stackTrace'], indent=2)
        send_exception_email(mail_body)
        log.debug(
            "Received client exception and passed error to exception mail handler"
        )
Example #54
0
    def post(self):
        """
        Whistleblower login: validate the receipt, wait out the
        anti-bruteforce delay and return the serialized session.
        """
        request = self.validate_message(self.request.content.read(),
                                        requests.ReceiptAuthDesc)

        delay = random_login_delay()
        if delay:
            yield deferred_sleep(delay)

        session = yield login_whistleblower(self.request.tid, request['receipt'],
                                            self.request.client_using_tor)

        log.debug("Login: Success (%s)" % session.user_role)

        returnValue(session.serialize())
def save_anomalies(store, anomaly_list):
    """
    Store each (date, description, alarm) anomaly tuple as an
    Anomalies row, logging every record and a final summary.
    """
    for anomaly_date, anomaly_desc, alarm_raised in anomaly_list:
        record = Anomalies()
        record.alarm = alarm_raised
        record.date = anomaly_date
        record.events = anomaly_desc
        log.debug("adding new anomaly in to the record: %s, %s, %s" %
                  (alarm_raised, anomaly_date, anomaly_desc))
        store.add(record)

    if len(anomaly_list):
        log.debug(
            "save_anomalies: Saved %d anomalies collected during the last hour"
            % len(anomaly_list))
Example #56
0
    def get(self, *uriargs):
        """
        Serve the Ahmia.fi description document assembled from the
        public node metadata.
        """
        log.debug("Requested Ahmia description file")
        node_info = yield anon_serialize_node(self.request.language)

        self.finish({
            "title": node_info['name'],
            "description": node_info['description'],
            # we've not yet keywords, need to add them in Node ?
            "keywords": "%s (GlobaLeaks instance)" % node_info['name'],
            "relation": node_info['public_site'],
            "language": node_info['default_language'],
            "contactInformation": u'',  # we've removed Node.email_addr
            "type": "GlobaLeaks"
        })
Example #57
0
            def call_handler(cls, *args, **kwargs):
                """
                Enforce the per-role Tor2web access policy before
                delegating to the wrapped handler: requests over
                Tor2web are rejected unless the role is allowed.
                """
                using_tor2web = cls.check_tor2web()

                if using_tor2web:
                    if not GLSettings.memory_copy.accept_tor2web_access[role]:
                        log.err(
                            "Denied request on Tor2web for role %s and resource '%s'"
                            % (role, cls.request.uri))
                        raise errors.TorNetworkRequired

                    log.debug(
                        "Accepted request on Tor2web for role '%s' and resource '%s'"
                        % (role, cls.request.uri))

                return method_handler(cls, *args, **kwargs)
    def receiverfile_notification_succeeded(self, store, result,
                                            receiverfile_id, receiver_id):
        """
        Callback for a successful Receiver File notification: mark the
        ReceiverFile as notified.
        """
        rfile = store.find(models.ReceiverFile,
                           models.ReceiverFile.id == receiverfile_id).one()

        if rfile is None:
            raise errors.FileIdNotFound

        rfile.mark = models.ReceiverFile._marker[1]  # 'notified'

        log.debug(
            "Email: +[Success] Notification of receiverfile %s for receiver %s"
            % (rfile.internalfile.name, rfile.receiver.user.username))
    def receiverfile_notification_failed(self, store, failure, receiverfile_id,
                                         receiver_id):
        """
        Callback for a failed Receiver File notification: mark the
        ReceiverFile as impossible to notify.
        """
        rfile = store.find(models.ReceiverFile,
                           models.ReceiverFile.id == receiverfile_id).one()

        if rfile is None:
            raise errors.FileIdNotFound

        rfile.mark = models.ReceiverFile._marker[2]  # 'unable to notify'

        log.debug(
            "Email: -[Fail] Notification of receiverfile %s for receiver %s" %
            (rfile.internalfile.name, rfile.receiver.user.username))
Example #60
0
    def perform_action(store, csr_fields):
        """
        Validate the stored TLS private key, then use it to generate a
        PEM-encoded certificate signing request from csr_fields.
        """
        db_cfg = load_tls_dict(store)

        validator = tls.PrivKeyValidator()
        ok, err = validator.validate(db_cfg)
        if not ok or err is not None:
            raise err

        try:
            csr_txt = tls.gen_x509_csr_pem(db_cfg['ssl_key'], csr_fields,
                                           GLSettings.csr_sign_bits)
            log.debug("Generated a new CSR")
            return csr_txt
        except Exception as e:
            log.err(e)
            raise errors.ValidationError('CSR gen failed')