Example #1
    def perform_pgp_validation_checks(self, store):
        expired_or_expiring = []

        for user in store.find(models.User):
            if user.pgp_key_public and user.pgp_key_expiration != datetime_null():
                if user.pgp_key_expiration < datetime_now():
                    expired_or_expiring.append(user_serialize_user(user, GLSettings.memory_copy.default_language))
                    if GLSettings.memory_copy.allow_unencrypted:
                        # The PGP key status should be downgraded only if the node
                        # accepts non-PGP mails/files to be sent/stored.
                        # If the node does not accept this, the PGP key status
                        # remains enabled and mail won't be sent by the regular flow.
                        user.pgp_key_status = u'disabled'
                        user.pgp_key_info = None
                        user.pgp_key_public = None
                        user.pgp_key_fingerprint = None
                        user.pgp_key_expiration = None
                elif user.pgp_key_expiration < datetime_now() + timedelta(days=15):
                    expired_or_expiring.append(user_serialize_user(user, GLSettings.memory_copy.default_language))

        if expired_or_expiring:
            if not GLSettings.memory_copy.disable_admin_notification_emails:
                self.prepare_admin_pgp_alerts(store, expired_or_expiring)

            for user_desc in expired_or_expiring:
                self.prepare_user_pgp_alerts(store, user_desc)
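
The branch above separates keys that have already expired from keys that are about to expire. Below is a minimal, self-contained sketch of the same window logic using only the standard library; the 15-day window and the helper name are illustrative assumptions, not part of the GlobaLeaks API:

from datetime import datetime, timedelta

def classify_pgp_expiration(expiration, now=None, window_days=15):
    # Returns 'expired', 'expiring' or 'ok' for a key expiration timestamp.
    now = now or datetime.utcnow()
    if expiration <= now:
        return 'expired'
    if expiration <= now + timedelta(days=window_days):
        return 'expiring'
    return 'ok'

print(classify_pgp_expiration(datetime.utcnow() + timedelta(days=3)))   # 'expiring'
print(classify_pgp_expiration(datetime.utcnow() - timedelta(days=1)))   # 'expired'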
Example #2
def update_receiver(store, receiver_id, request, language=GLSetting.memory_copy.default_language):
    """
    Updates the specified receiver with the details.
    raises :class:`globaleaks.errors.ReceiverIdNotFound` if the receiver does
    not exist.
    """
    receiver = models.Receiver.get(store, receiver_id)

    if not receiver:
        raise errors.ReceiverIdNotFound

    fill_localized_keys(request, models.Receiver.localized_strings, language)

    mail_address = request['mail_address']

    homonymous = store.find(models.User, models.User.username == mail_address).one()
    if homonymous and homonymous.id != receiver.user_id:
        log.err("Update error: already present receiver with the requested username: %s" % mail_address)
        raise errors.ExpectedUniqueField('mail_address', mail_address)

    receiver.mail_address = mail_address

    # the email address is also the username, stored in User
    receiver.user.username = mail_address

    receiver.user.state = request['state']

    # The various options related to managing GPG keys are handled here.
    gpg_options_parse(receiver, request)

    receiver.user.language = request.get('language', GLSetting.memory_copy.default_language)
    receiver.user.timezone = request.get('timezone', GLSetting.memory_copy.default_timezone)

    password = request['password']
    if len(password):
        security.check_password_format(password)
        receiver.user.password = security.hash_password(password, receiver.user.salt)
        receiver.user.password_change_date = datetime_now()

    contexts = request.get('contexts', [])

    for context in receiver.contexts:
        receiver.contexts.remove(context)

    for context_id in contexts:
        context = models.Context.get(store, context_id)
        if not context:
            log.err("Update error: unexistent context can't be associated")
            raise errors.ContextIdNotFound
        receiver.contexts.add(context)

    receiver.last_update = datetime_now()
    try:
        receiver.update(request)
    except DatabaseError as dberror:
        log.err("Unable to update receiver %s: %s" % (receiver.name, dberror))
        raise errors.InvalidInputFormat(dberror)

    return admin_serialize_receiver(receiver, language)
Example #3
    def check_tenant_anomalies(self, tid):
        """
        This function updates the alarm level.

        """
        self.number_of_anomalies = 0

        self.event_matrix.clear()

        for event in State.tenant_state[tid].RecentEventQ:
            self.event_matrix.setdefault(event.event_type, 0)
            self.event_matrix[event.event_type] += 1

        for event_name, threshold in ANOMALY_MAP.items():
            if event_name in self.event_matrix:
                if self.event_matrix[event_name] > threshold:
                    self.number_of_anomalies += 1

        previous_activity_sl = self.alarm_levels['activity']

        log_function = log.debug
        self.alarm_levels['activity'] = 0

        if self.number_of_anomalies == 1:
            log_function = log.info
            self.alarm_levels['activity'] = 1
        elif self.number_of_anomalies > 1:
            log_function = log.info
            self.alarm_levels['activity'] = 2

        # if there is any anomaly, or we are close to one, record it.
        if self.number_of_anomalies >= 1 or self.alarm_levels['activity'] >= 1:
            State.tenant_state[tid].AnomaliesQ.append([datetime_now(), self.event_matrix, self.alarm_levels['activity']])

        if previous_activity_sl != self.alarm_levels['activity']:
            log_function("Alarm level changed from %d => %d" %
                         (previous_activity_sl,
                         self.alarm_levels['activity']))

        if State.tenant_cache[1].notification.disable_admin_notification_emails:
            return

        if not (self.alarm_levels['activity'] or self.alarm_levels['disk_space']):
            return

        if not is_expired(self.last_alarm_email, minutes=120):
            return

        self.last_alarm_email = datetime_now()

        alert = {
            'alarm_levels': self.alarm_levels,
            'measured_freespace': self.measured_freespace,
            'measured_totalspace': self.measured_totalspace,
            'event_matrix': self.event_matrix
        }

        yield generate_admin_alert_mail(tid, alert)
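
The event_matrix bookkeeping above is an occurrence count per event type followed by a threshold comparison. Here is a rough equivalent with collections.Counter; the ANOMALY_MAP values and event names below are made up for illustration and are not the real GlobaLeaks thresholds:

from collections import Counter

ANOMALY_MAP = {'login': 30, 'submission': 10}        # hypothetical thresholds
recent_events = ['login'] * 35 + ['submission'] * 4  # hypothetical recent activity

event_matrix = Counter(recent_events)
number_of_anomalies = sum(1 for name, threshold in ANOMALY_MAP.items()
                          if event_matrix[name] > threshold)

# 0 anomalies -> level 0, 1 anomaly -> level 1, more -> level 2
activity_level = min(number_of_anomalies, 2)
print(dict(event_matrix), number_of_anomalies, activity_level)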
Example #4
def db_update_status(rtip, status):
    rtip.internaltip.status_id = status

    # if the status is anything other than 'Non ancora presa in carico'
    # ("not yet taken in charge") and it is the first time it changes,
    # then we also update the related working date
    if rtip.internaltip.status_id != 'f480f110-b0fa-4461-9945-1bc170c4af6f':
        rtip.internaltip.working_date = datetime_now()

    rtip.internaltip.update_date = datetime_now()
Example #5
def db_update_receiver(rtip, receiver, receiver_files):
    # updating both the rtip and the receiverfile
    old_receiver = rtip.receiver_id
    rtip.receiver_id = receiver
    rtip.internaltip.assigned_date = datetime_now()
    rtip.internaltip.update_date = datetime_now()

    for receiver_file in receiver_files:
        receiver_file.receiver_id = receiver
Example #6
def get_receiver_list_wb(store, wb_tip_id, language):
    """
    @return:
        This function contains the serialization of the receivers; it is
        used only by the /wbtip/receivers API.

        The returned struct contains information on read/unread messages.
    """
    receiver_list = []

    wb_tip = store.find(WhistleblowerTip,
                        WhistleblowerTip.id == unicode(wb_tip_id)).one()

    if not wb_tip:
        raise errors.TipReceiptNotFound

    def localize_and_append_receiver(receiver, receiver_desc):
        mo = Rosetta(receiver.localized_strings)
        mo.acquire_storm_object(receiver)
        receiver_desc["description"] = mo.dump_localized_attr("description", language)
        receiver_list.append(receiver_desc)

    if not wb_tip.internaltip.receivertips.count():

        # This code path is used when receiver tips have not yet been created
        for receiver in wb_tip.internaltip.receivers:
            # This is the reduced version of Receiver serialization
            receiver_desc = {
                "name": receiver.name,
                "id": receiver.id,
                "pgp_key_status": receiver.pgp_key_status,
                "access_counter": 0,
                "message_counter": 0,
                "creation_date": datetime_to_ISO8601(datetime_now()),
            }

            localize_and_append_receiver(receiver, receiver_desc)

    else:

        for rtip in wb_tip.internaltip.receivertips:
            message_counter = store.find(Message,
                                         Message.receivertip_id == rtip.id).count()

            receiver_desc = {
                "name": rtip.receiver.name,
                "id": rtip.receiver.id,
                "pgp_key_status": rtip.receiver.pgp_key_status,
                "access_counter": rtip.access_counter,
                "message_counter": message_counter,
                "creation_date": datetime_to_ISO8601(datetime_now()),
            }

            localize_and_append_receiver(rtip.receiver, receiver_desc)

    return receiver_list
Example #7
    def db_clean(self, session):
        # delete stats older than 90 days
        session.query(models.Stats).filter(models.Stats.start < datetime_now() - timedelta(90)).delete(synchronize_session='fetch')

        # delete anomalies older than 1 year
        session.query(models.Anomalies).filter(models.Anomalies.date < datetime_now() - timedelta(365)).delete(synchronize_session='fetch')

        # delete archived schemas not used by any existing submission
        hashes = [x[0] for x in session.query(models.InternalTipAnswers.questionnaire_hash)]
        if hashes:
            session.query(models.ArchivedSchema).filter(not_(models.ArchivedSchema.hash.in_(hashes))).delete(synchronize_session='fetch')
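
The last query above deletes every ArchivedSchema whose hash no longer appears in any InternalTipAnswers row. Below is a self-contained sketch of that not_(...in_(...)) bulk-delete pattern against an in-memory SQLite database, assuming SQLAlchemy 1.4+; the Schema and Answer models are stand-ins, not the GlobaLeaks models:

from sqlalchemy import Column, String, create_engine, not_
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Schema(Base):
    __tablename__ = 'archived_schema'
    hash = Column(String, primary_key=True)

class Answer(Base):
    __tablename__ = 'answers'
    id = Column(String, primary_key=True)
    questionnaire_hash = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add_all([Schema(hash='a'), Schema(hash='b'),
                 Answer(id='1', questionnaire_hash='a')])
session.commit()

# delete archived schemas not referenced by any existing answer
hashes = [x[0] for x in session.query(Answer.questionnaire_hash)]
if hashes:
    session.query(Schema).filter(not_(Schema.hash.in_(hashes))) \
           .delete(synchronize_session='fetch')
session.commit()

print([s.hash for s in session.query(Schema)])  # ['a']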
Example #8
def update_receiver(store, receiver_id, request, language):
    """
    Updates the specified receiver with the details.
    raises :class:`globaleaks.errors.ReceiverIdNotFound` if the receiver does
    not exist.
    """
    receiver = models.Receiver.get(store, receiver_id)

    if not receiver:
        raise errors.ReceiverIdNotFound

    fill_localized_keys(request, models.Receiver.localized_strings, language)

    receiver.user.state = request['state']
    receiver.user.password_change_needed = request['password_change_needed']

    # The various options related to managing PGP keys are handled here.
    pgp_options_parse(receiver, request)

    receiver.user.language = request.get('language', GLSetting.memory_copy.language)
    receiver.user.timezone = request.get('timezone', GLSetting.memory_copy.default_timezone)

    password = request['password']
    if len(password):
        security.check_password_format(password)
        receiver.user.password = security.hash_password(password, receiver.user.salt)
        receiver.user.password_change_date = datetime_now()

    contexts = request.get('contexts', [])

    for context in receiver.contexts:
        receiver.contexts.remove(context)

    for context_id in contexts:
        context = models.Context.get(store, context_id)
        if not context:
            raise errors.ContextIdNotFound

        receiver.contexts.add(context)

    receiver.last_update = datetime_now()

    try:
        receiver.update(request)
    except DatabaseError as dberror:
        log.err("Unable to update receiver %s: %s" % (receiver.name, dberror))
        raise errors.InvalidInputFormat(dberror)

    return admin_serialize_receiver(receiver, language)
Example #9
def update_session(user):
    """
    Returns True if the session is still valid, False otherwise.
    Timed out sessions are destroyed.
    """
    session_info = GLSetting.sessions[user.id]
    
    if utility.is_expired(session_info.refreshdate,
                          seconds=GLSetting.defaults.lifetimes[user.role]):

        log.debug("Authentication Expired (%s) %s seconds" % (
                  user.role,
                  GLSetting.defaults.lifetimes[user.role] ))

        del GLSetting.sessions[user.id]
        
        return False

    else:

        # update the access time to the latest
        GLSetting.sessions[user.id].refreshdate = utility.datetime_now()
        GLSetting.sessions[user.id].expirydate = utility.get_future_epoch(
            seconds=GLSetting.defaults.lifetimes[user.role])

        return True
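
update_session above implements a sliding expiration: compare the last refresh time against a per-role lifetime and either destroy the session or push the refresh timestamp forward. A small stand-alone sketch of that pattern follows; the lifetime table and the dict-based session store are hypothetical:

from datetime import datetime, timedelta

LIFETIMES = {'admin': 3600, 'receiver': 3600}  # hypothetical seconds per role
sessions = {}                                  # user_id -> {'role': ..., 'refreshdate': datetime}

def touch_session(user_id):
    # Returns True and refreshes the session if still valid, otherwise drops it.
    info = sessions[user_id]
    lifetime = timedelta(seconds=LIFETIMES[info['role']])
    if datetime.utcnow() - info['refreshdate'] > lifetime:
        del sessions[user_id]
        return False
    info['refreshdate'] = datetime.utcnow()
    return True

sessions['u1'] = {'role': 'receiver', 'refreshdate': datetime.utcnow()}
print(touch_session('u1'))  # True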
Example #10
def login_receiver(store, username, password):
    """
    This receiver login also needs to track the number of unsuccessful
    consecutive logins, because this counter may lead to a password lockdown.
    """
    receiver_user = store.find(User, User.username == username).one()

    if not receiver_user or receiver_user.role != 'receiver':
        log.debug("Receiver: Fail auth, username %s do not exists" % username)
        return False

    if not security.check_password(password, receiver_user.password, receiver_user.salt):
        receiver_user.failed_login_count += 1
        log.debug("Receiver login: Invalid password (failed: %d)" % receiver_user.failed_login_count)
        if username in GLSetting.failed_login_attempts:
            GLSetting.failed_login_attempts[username] += 1
        else:
            GLSetting.failed_login_attempts[username] = 1

        return False
    else:
        log.debug("Receiver: Authorized receiver %s" % username)
        receiver_user.last_login = datetime_now()
        receiver = store.find(Receiver, (Receiver.user_id == receiver_user.id)).one()
        return receiver.id
Example #11
    def handle_file_append(self, itip_id):
        uploaded_file = self.get_file_upload()
        if uploaded_file is None:
            return

        uploaded_file['body'].avoid_delete()
        uploaded_file['body'].close()

        try:
            # First: dump the file to the filesystem;
            # an exception raised here would prevent the InternalFile from being recorded
            uploaded_file = yield threads.deferToThread(dump_file_fs, uploaded_file)
        except Exception as excep:
            log.err("Unable to save a file in filesystem: %s" % excep)
            raise errors.InternalServerError("Unable to accept new files")

        uploaded_file['creation_date'] = datetime_now()
        uploaded_file['submission'] = False

        try:
            # Second: register the file in the database
            yield register_file_db(uploaded_file, itip_id)
        except Exception as excep:
            log.err("Unable to register (append) file in DB: %s" % excep)
            raise errors.InternalServerError("Unable to accept new files")
Example #12
    def update_AnomalyQ(cls, event_matrix, alarm_level):
        # called from statistics_sched
        date = datetime_to_ISO8601(datetime_now())[:-8]

        StatisticsSchedule.RecentAnomaliesQ.update({
            date: [event_matrix, alarm_level]
        })
Example #13
    def __init__(self):
        self.current_time = datetime_now()

        self.difficulty_dict = {
            'human_captcha': False,
            'graph_captcha': False
        }
Example #14
    def _fill_event_dict(self, event_type, event_trigger):
        """
        A notification is based on the Node, Context and Receiver values
        that have to be taken from the database.
        """
        self.subevent = {}

        # this is requested in the file cases
        if event_type == 'ping_mail':
            self.subevent = {'counter': 42}
        elif event_type == 'admin_pgp_expiration_alert':
            self.subevent = {'expired_or_expiring': [self.receiver_dict]}
        else:
            self.subevent['name'] = ' foo '
            self.subevent['size'] = ' 123 '
            self.subevent['content_type'] = ' application/javascript '
            self.subevent['creation_date'] = datetime_now()
            self.subevent['type'] = ' sorry maker '

        self.event = Event(type=event_type,
                           trigger=event_trigger,
                           node_info=self.node_dict,
                           receiver_info=self.receiver_dict,
                           context_info=self.context_dict,
                           tip_info=self.rtip_dict,
                           subevent_info=self.subevent,
                           do_mail=False)
Example #15
    def set_default_headers(self):
        """
        This function sets a number of security-related headers,
        covering web server version disclosure and XSS mitigations.

        This is the first function called when a new request reaches GLB.
        """
        self.request.start_time = datetime_now()

        # to avoid version attacks
        self.set_header("Server", "globaleaks")

        # to reduce the possibility of XSS attacks.
        self.set_header("X-Content-Type-Options", "nosniff")
        self.set_header("X-XSS-Protection", "1; mode=block")

        self.set_header("Cache-control", "no-cache, no-store, must-revalidate")
        self.set_header("Pragma", "no-cache")

        self.set_header("Expires", "-1")

        # to avoid information leakage via referrer
        self.set_header("Content-Security-Policy", "referrer no-referrer")

        # to avoid Robots spidering, indexing, caching
        if not GLSettings.memory_copy.allow_indexing:
            self.set_header("X-Robots-Tag", "noindex")

        # to mitigate clickjacking attacks on iframes, allow only same origin;
        # same origin is needed in order to include svg and other html <object>
        if not GLSettings.memory_copy.allow_iframes_inclusion:
            self.set_header("X-Frame-Options", "sameorigin")
Example #16
def db_admin_update_user(session, tid, user_id, request, language):
    """
    Updates the specified user.
    """
    fill_localized_keys(request, models.User.localized_keys, language)

    user = db_get_user(session, tid, user_id)

    if user.username != request['username']:
        check = session.query(models.User).filter(models.User.username == text_type(request['username']),
                                                  models.UserTenant.user_id == models.User.id,
                                                  models.UserTenant.tenant_id == tid).one_or_none()
        if check is not None:
            raise errors.InputValidationError('Username already in use')

    user.update(request)

    password = request['password']
    if password:
        user.hash_alg = GCE.HASH
        user.salt = GCE.generate_salt()
        user.password = GCE.hash_password(password, user.salt)
        user.password_change_date = datetime_now()
        user.crypto_prv_key = b''
        user.crypto_pub_key = b''

    # The various options related to managing PGP keys are handled here.
    parse_pgp_options(user, request)

    if user.role == 'admin':
        db_refresh_memory_variables(session, [tid])

    return user
Example #17
    def operation(self):
        # ------- BEGIN Anomalies section -------
        anomalies_to_save = get_anomalies()
        yield save_anomalies(anomalies_to_save)
        # ------- END Anomalies section ---------

        # ------- BEGIN Stats section -----------
        current_time = datetime_now()
        statistic_summary = get_statistics()
        yield save_statistics(self.collection_start_time,
                              current_time, statistic_summary)
        # ------- END Stats section -------------

        # ------- BEGIN Mail thresholds management -----------
        GLSettings.exceptions = {}
        GLSettings.exceptions_email_count = 0

        for k, v in GLSettings.mail_counters.iteritems():
            if v > GLSettings.memory_copy.notification_threshold_per_hour:
                GLSettings.mail_counters[k] -= GLSettings.memory_copy.notification_threshold_per_hour
            else:
                GLSettings.mail_counters[k] = 0
        # ------- END Mail thresholds management -----------

        self.reset()

        log.debug("Saved stats and time updated, keys saved %d" % len(statistic_summary.keys()))
Example #18
    def _fill_event(self, type, trigger, trigger_id):

        if type == u'tip' and trigger == 'Tip':

            receiver_dict = yield admin.get_receiver(self.createdReceiver['receiver_gus'])
            context_dict = yield admin.get_context(self.createdContext['context_gus'])
            notif_dict = yield admin.get_notification()

            yield admin.import_memory_variables()
            node_dict = yield admin.get_node()

            self.event = Event(
                type = u'tip',
                trigger = 'Tip',
                notification_settings = notif_dict,
                node_info = node_dict,
                receiver_info = receiver_dict,
                context_info = context_dict,
                plugin = None,
                trigger_info = {
                   'id': trigger_id,
                   'creation_date': pretty_date_time(datetime_now())
                }
            )

        elif type == u'comment' and trigger == 'Comment':
            raise AssertionError("Not yet managed Mock comments")
        elif type == u'file' and trigger == 'File':
            raise AssertionError("Not yet managed Mock files")
        else:
            raise AssertionError("type and trigger maybe refactored, but you're using it bad")
Example #19
def update_context(store, context_gus, request, language=GLSetting.memory_copy.default_language):
    """
    Updates the specified context. If the key 'receivers' is specified, we remove
    the current receivers of the Context and reset it to the newly specified
    ones.
    If no such context exists raises :class:`globaleaks.errors.ContextGusNotFound`.

    Args:
        context_gus:

        request:
            (dict) the request to use to set the attributes of the Context

    Returns:
            (dict) the serialized object updated
    """
    context = store.find(Context, Context.id == unicode(context_gus)).one()

    if not context:
        raise errors.ContextGusNotFound

    v = dict(request)

    for attr in getattr(Context, "localized_strings"):
        v[attr] = getattr(context, attr)
        v[attr][language] = unicode(request[attr])

    request = v
    
    for receiver in context.receivers:
        context.receivers.remove(receiver)

    receivers = request.get('receivers', [])
    for receiver_id in receivers:
        receiver = store.find(Receiver, Receiver.id == unicode(receiver_id)).one()
        if not receiver:
            log.err("Update error: unexistent receiver can't be associated")
            raise errors.ReceiverGusNotFound
        context.receivers.add(receiver)

    # tip_timetolive and submission_timetolive need to be converted to seconds from hours and days
    (context.submission_timetolive, context.tip_timetolive) = acquire_context_timetolive(request)

    if len(context.receipt_regexp) < 4:
        log.err("Fixing receipt regexp < 4 byte with fixme-[0-9]{13}-please")
        context.receipt_regexp = u"fixme-[0-9]{13}-please"

    try:
        fo = structures.Fields(context.localized_fields, context.unique_fields)
        fo.update_fields(language, request['fields'])
        fo.context_import(context)
    except Exception as excep:
        log.err("Unable to update fields: %s" % excep)
        raise excep

    context.last_update = utility.datetime_now()
    context.update(request)

    receipt_example = generate_example_receipt(context.receipt_regexp)
    return admin_serialize_context(context, receipt_example, language)
Example #20
    def check_for_expiring_submissions(self, store):
        threshold = datetime_now() + timedelta(GLSettings.memory_copy.notif.tip_expiration_threshold)
        for rtip in store.find(models.ReceiverTip, models.ReceiverTip.internaltip_id == models.InternalTip.id,
                                                   models.InternalTip.expiration_date < threshold):
            user = rtip.receiver.user
            language = user.language
            node_desc = db_admin_serialize_node(store, language)
            notification_desc = db_get_notification(store, language)
            context_desc = admin_serialize_context(store, rtip.internaltip.context, language)
            receiver_desc = admin_serialize_receiver(rtip.receiver, language)
            tip_desc = serialize_rtip(store, rtip, user.language)

            data = {
               'type': u'tip_expiration',
               'node': node_desc,
               'context': context_desc,
               'receiver': receiver_desc,
               'notification': notification_desc,
               'tip': tip_desc
            }

            subject, body = Templating().get_mail_subject_and_body(data)

            mail = models.Mail({
               'address': data['receiver']['mail_address'],
               'subject': subject,
               'body': body
            })

            store.add(mail)
Example #21
def get_dummy_file(filename=None, content=None):
    global files_count
    files_count += 1

    if filename is None:
        filename = ''.join(six.unichr(x) for x in range(0x400, 0x40A)).join('-%d' % files_count)

    content_type = u'application/octet'

    if content is None:
        content = base64.b64decode(VALID_BASE64_IMG)

    temporary_file = SecureTemporaryFile(Settings.tmp_path)

    with temporary_file.open('w') as f:
        f.write(content)
        f.finalize_write()

    State.TempUploadFiles[os.path.basename(temporary_file.filepath)] = temporary_file

    return {
        'date': datetime_now(),
        'name': filename,
        'description': 'description',
        'body': temporary_file,
        'size': len(content),
        'filename': os.path.basename(temporary_file.filepath),
        'type': content_type,
        'submission': False
    }
Example #22
    def operation(self):
        """
        executed every 60 minutes
        """

        for when, anomaly_blob in dict(StatisticsSchedule.RecentAnomaliesQ).iteritems():
            yield save_anomalies(when, anomaly_blob[0], anomaly_blob[1])

        StatisticsSchedule.RecentAnomaliesQ = dict()

        # Address the statistics. The start and end times are strings
        # without the last 8 bytes, to let d3.js parse them easily;
        # creation_date, from the default model, is ignored in the visualisation
        current_time = datetime_now()
        statistic_summary = {}

        #  {  'id' : expired_event.event_id
        #     'when' : datetime_to_ISO8601(expired_event.creation_date)[:-8],
        #     'event' : expired_event.event_type, 'duration' :   }

        for descblob in StatisticsSchedule.RecentEventQ:
            if not descblob.has_key('event'):
                continue
            statistic_summary.setdefault(descblob['event'], 0)
            statistic_summary[descblob['event']] += 1

        yield save_statistics(StatisticsSchedule.collection_start_datetime,
                              current_time, statistic_summary)

        StatisticsSchedule.reset()
        StatisticsSchedule.collection_start_datetime = current_time

        log.debug("Saved stats and time updated, keys saved %d" %
                  len(statistic_summary.keys()))
Example #23
    def __init__(self, application, request, **kwargs):
        RequestHandler.__init__(self, application, request, **kwargs)

        self.name = type(self).__name__

        self.handler_time_analysis_begin()
        self.handler_request_logging_begin()

        self.req_id = GLSettings.requests_counter
        GLSettings.requests_counter += 1

        self.request.start_time = datetime_now()

        self.request.request_type = None
        if 'import' in self.request.arguments:
            self.request.request_type = 'import'
        elif 'export' in self.request.arguments:
            self.request.request_type = 'export'

        language = self.request.headers.get('GL-Language')

        if language is None:
            for l in self.parse_accept_language_header():
                if l in GLSettings.memory_copy.languages_enabled:
                    language = l
                    break

        if language is None or language not in GLSettings.memory_copy.languages_enabled:
            language = GLSettings.memory_copy.default_language

        self.request.language = language
        self.set_header("Content-Language", language)
Example #24
def db_user_update_user(store, user_id, request, language):
    """
    Updates the specified user.
    This version of the function is specific to users who, compared with
    admins, can change only a few things:
      - preferred language
      - the password (with old password check)
      - pgp key
    raises :class:`globaleaks.errors.UserIdNotFound` if the user does not exist.
    """
    user = models.User.get(store, user_id)

    if not user:
        raise errors.UserIdNotFound

    user.language = request.get('language', GLSettings.memory_copy.default_language)

    new_password = request['password']
    old_password = request['old_password']

    if len(new_password) and len(old_password):
        user.password = change_password(user.password,
                                        old_password,
                                        new_password,
                                        user.salt)

        if user.password_change_needed:
            user.password_change_needed = False

        user.password_change_date = datetime_now()

    # The various options related to managing PGP keys are handled here.
    parse_pgp_options(user, request)

    return user
Example #25
def postpone_expiration_date(store, user_id, rtip_id):
    rtip = db_access_rtip(store, user_id, rtip_id)

    if not (GLSettings.memory_copy.can_postpone_expiration or rtip.receiver.can_postpone_expiration):

        raise errors.ExtendTipLifeNotEnabled

    log.debug(
        "Postpone check: Node %s, Receiver %s"
        % (
            "True" if GLSettings.memory_copy.can_postpone_expiration else "False",
            "True" if rtip.receiver.can_postpone_expiration else "False",
        )
    )

    db_postpone_expiration_date(rtip)

    log.debug(
        " [%s] in %s has postponed expiration time to %s"
        % (
            rtip.receiver.user.name,
            datetime_to_pretty_str(datetime_now()),
            datetime_to_pretty_str(rtip.internaltip.expiration_date),
        )
    )
Example #26
def generate_password_reset_token(session, state, tid, username_or_email, allow_admin_reset=False):
    from globaleaks.handlers.admin.notification import db_get_notification
    from globaleaks.handlers.admin.node import db_admin_serialize_node
    from globaleaks.handlers.user import user_serialize_user

    users = session.query(models.User).filter(
      or_(models.User.username == username_or_email,
          models.User.mail_address == username_or_email),
      models.UserTenant.user_id == models.User.id,
      models.UserTenant.tenant_id == tid
    ).distinct()

    for user in users:
        if not allow_admin_reset and user.role == u'admin':
            continue

        user.reset_password_token = generateRandomKey(32)
        user.reset_password_date = datetime_now()

        user_desc = user_serialize_user(session, user, user.language)

        template_vars = {
            'type': 'password_reset_validation',
            'user': user_desc,
            'reset_token': user.reset_password_token,
            'node': db_admin_serialize_node(session, 1, user.language),
            'notification': db_get_notification(session, tid, user.language)
        }

        state.format_and_send_mail(session, tid, user_desc, template_vars)
Example #27
def register_wbfile_on_db(store, uploaded_file, receivertip_id):
    receivertip = store.find(ReceiverTip,
                             ReceiverTip.id == receivertip_id).one()

    if not receivertip:
        log.err("Cannot associate a file to a not existent receivertip!")
        raise errors.TipIdNotFound

    receivertip.update_date = datetime_now()

    new_file = WhistleblowerFile()

    new_file.name = uploaded_file['name']
    new_file.description = uploaded_file['description']

    new_file.content_type = uploaded_file['type']
    new_file.size = uploaded_file['size']
    new_file.receivertip_id = receivertip.id
    new_file.file_path = uploaded_file['path']

    store.add(new_file)

    log.debug("=> Recorded new WhistleblowerFile %s" % uploaded_file['name'])

    return serializers.serialize_wbfile(new_file)
Example #28
    def post(self, token_id):
        """
        Parameter: internaltip_id
        Request: Unknown
        Response: Unknown
        Errors: TokenFailure
        """
        token = TokenList.get(token_id)

        log.debug("file upload with token associated: %s" % token)

        uploaded_file = self.get_file_upload()
        if uploaded_file is None:
            return

        uploaded_file['body'].avoid_delete()
        uploaded_file['body'].close()

        try:
            dst = os.path.join(GLSettings.submission_path,
                               os.path.basename(uploaded_file['path']))

            directory_traversal_check(GLSettings.submission_path, dst)

            uploaded_file = yield threads.deferToThread(write_upload_encrypted_to_disk, uploaded_file, dst)
            uploaded_file['date'] = datetime_now()
            uploaded_file['submission'] = True

            token.associate_file(uploaded_file)

        except Exception as excep:
            log.err("Unable to save file in filesystem: %s" % excep)
            raise errors.InternalServerError("Unable to accept files")

        self.set_status(201)  # Created
Example #29
def login_whistleblower(session, tid, receipt):
    """
    login_whistleblower returns a session
    """
    x = None

    algorithms = [x[0] for x in session.query(WhistleblowerTip.hash_alg).filter(WhistleblowerTip.tid == tid).distinct()]
    if algorithms:
        hashes = []
        for alg in algorithms:
            hashes.append(GCE.hash_password(receipt, State.tenant_cache[tid].receipt_salt, alg))

        x  = session.query(WhistleblowerTip, InternalTip) \
                    .filter(WhistleblowerTip.receipt_hash.in_(hashes),
                            WhistleblowerTip.tid == tid,
                            InternalTip.id == WhistleblowerTip.id,
                            InternalTip.tid == WhistleblowerTip.tid).one_or_none()

    if x is None:
        log.debug("Whistleblower login: Invalid receipt")
        Settings.failed_login_attempts += 1
        raise errors.InvalidAuthentication

    wbtip = x[0]
    itip = x[1]

    itip.wb_last_access = datetime_now()

    crypto_prv_key = ''
    if State.tenant_cache[1].encryption and wbtip.crypto_prv_key:
        user_key = GCE.derive_key(receipt.encode('utf-8'), State.tenant_cache[tid].receipt_salt)
        crypto_prv_key = GCE.symmetric_decrypt(user_key, wbtip.crypto_prv_key)

    return Sessions.new(tid, wbtip.id, 'whistleblower', False, crypto_prv_key)
Example #30
def db_admin_update_user(store, user_id, request, language):
    """
    Updates the specified user.
    raises :class:`globaleaks.errors.UserIdNotFound` if the user does not exist.
    """
    user = models.User.get(store, user_id)
    if not user:
        raise errors.UserIdNotFound

    fill_localized_keys(request, models.User.localized_keys, language)

    user.name = request['name']
    user.description = request['description']

    user.state = request['state']
    user.password_change_needed = request['password_change_needed']
    user.mail_address = request['mail_address']

    user.language = request.get('language', GLSettings.memory_copy.default_language)
    user.timezone = request.get('timezone', GLSettings.memory_copy.default_timezone)

    password = request['password']
    if len(password):
        security.check_password_format(password)
        user.password = security.hash_password(password, user.salt)
        user.password_change_date = datetime_now()

    # The various options related to managing PGP keys are handled here.
    parse_pgp_options(user, request)

    return user
Example #31
def register_ifile_on_db(session, tid, internaltip_id, uploaded_file):
    """
    Register a file on the database

    :param session: An ORM session
    :param tid: A tenant id
    :param internaltip_id: A id of the submission on which attaching the file
    :param uploaded_file: A file to be attached
    :return: A descriptor of the file
    """
    now = datetime_now()

    itip = session.query(models.InternalTip) \
                  .filter(models.InternalTip.id == internaltip_id,
                          models.InternalTip.status != 'closed',
                          models.InternalTip.tid == tid).one()

    itip.update_date = now
    itip.wb_last_access = now

    if itip.crypto_tip_pub_key:
        for k in ['name', 'type', 'size']:
            uploaded_file[k] = base64.b64encode(
                GCE.asymmetric_encrypt(itip.crypto_tip_pub_key,
                                       str(uploaded_file[k])))

    new_file = models.InternalFile()
    new_file.name = uploaded_file['name']
    new_file.content_type = uploaded_file['type']
    new_file.size = uploaded_file['size']
    new_file.internaltip_id = internaltip_id
    new_file.filename = uploaded_file['filename']
    new_file.submission = uploaded_file['submission']

    session.add(new_file)

    return serializers.serialize_ifile(session, new_file)
Example #32
    def process_file_upload(self):
        if b'flowFilename' not in self.request.args:
            return None

        total_file_size = int(self.request.args[b'flowTotalSize'][0])
        flow_identifier = self.request.args[b'flowIdentifier'][0]

        chunk_size = len(self.request.args[b'file'][0])
        if ((chunk_size / (1024 * 1024)) > self.state.tenant_cache[self.request.tid].maximum_filesize or
            (total_file_size / (1024 * 1024)) > self.state.tenant_cache[self.request.tid].maximum_filesize):
            log.err("File upload request rejected: file too big", tid=self.request.tid)
            raise errors.FileTooBig(self.state.tenant_cache[self.request.tid].maximum_filesize)

        if flow_identifier not in self.state.TempUploadFiles:
            self.state.TempUploadFiles.set(flow_identifier, SecureTemporaryFile(Settings.tmp_path))

        f = self.state.TempUploadFiles[flow_identifier]
        with f.open('w') as f:
            f.write(self.request.args[b'file'][0])

            if self.request.args[b'flowChunkNumber'][0] != self.request.args[b'flowTotalChunks'][0]:
                return None
            else:
                f.finalize_write()

        mime_type, _ = mimetypes.guess_type(text_type(self.request.args[b'flowFilename'][0], 'utf-8'))
        if mime_type is None:
            mime_type = 'application/octet-stream'

        self.uploaded_file = {
            'date': datetime_now(),
            'name': self.request.args[b'flowFilename'][0],
            'type': mime_type,
            'size': total_file_size,
            'filename': os.path.basename(f.filepath),
            'body': f,
            'description': self.request.args.get(b'description', [''])[0]
        }
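
The size guard in process_file_upload converts both the current chunk and the declared total size to megabytes before comparing them to the per-tenant limit. The same check in isolation; the 30 MB limit is an arbitrary example value, not a GlobaLeaks default:

MAX_FILESIZE_MB = 30  # hypothetical limit

def exceeds_limit(chunk_size_bytes, total_size_bytes, limit_mb=MAX_FILESIZE_MB):
    # True if either the chunk or the declared total is over the limit.
    to_mb = lambda n: n / (1024 * 1024)
    return to_mb(chunk_size_bytes) > limit_mb or to_mb(total_size_bytes) > limit_mb

print(exceeds_limit(5 * 1024 * 1024, 50 * 1024 * 1024))  # True: the total exceeds 30 MB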
Example #33
    def operation(self):
        # ------- BEGIN Anomalies section -------
        anomalies_to_save = get_anomalies()
        save_anomalies(anomalies_to_save)
        # ------- END Anomalies section ---------

        # ------- BEGIN Stats section -----------
        current_time = datetime_now()
        statistic_summary = get_statistics()
        save_statistics(self.collection_start_time, current_time,
                        statistic_summary)
        # ------- END Stats section -------------

        # ------- BEGIN Mail thresholds management -----------
        GLSettings.exceptions = {}
        GLSettings.exceptions_email_count = 0
        GLSettings.mail_counters.clear()
        # ------- END Mail thresholds management -----------

        self.reset()

        log.debug("Saved stats and time updated, keys saved %d" %
                  len(statistic_summary.keys()))
Example #34
def db_admin_update_user(store, user_id, request, language):
    """
    Updates the specified user.
    raises: globaleaks.errors.UserIdNotFound` if the user does not exist.
    """
    fill_localized_keys(request, models.User.localized_keys, language)

    user = models.db_get(store, models.User, id=user_id)

    user.update(request)

    password = request['password']
    if password:
        user.password = security.hash_password(password, user.salt)
        user.password_change_date = datetime_now()

    # The various options related to managing PGP keys are handled here.
    parse_pgp_options(user, request)

    if user.role == 'admin':
        db_refresh_exception_delivery_list(store)

    return user
Example #35
def login_whistleblower(store, receipt, using_tor2web):
    """
    login_whistleblower returns the WhistleblowerTip.id
    """
    hashed_receipt = security.hash_password(receipt, GLSettings.memory_copy.receipt_salt)
    wbtip = store.find(WhistleblowerTip,
                        WhistleblowerTip.receipt_hash == unicode(hashed_receipt)).one()

    if not wbtip:
        log.debug("Whistleblower login: Invalid receipt")
        GLSettings.failed_login_attempts += 1
        raise errors.InvalidAuthentication

    if using_tor2web and not accept_tor2web('whistleblower'):
        log.err("Denied login request on Tor2web for role 'whistleblower'")
        raise errors.TorNetworkRequired
    else:
        log.debug("Accepted login request on Tor2web for role 'whistleblower'")

    log.debug("Whistleblower login: Valid receipt")
    wbtip.last_access = utility.datetime_now()
    store.commit()  # the transact was read only! on success we apply the commit()
    return wbtip.id
Example #36
def register_file_db(store, uploaded_file, internaltip_id):
    internaltip = store.find(InternalTip,
                             InternalTip.id == internaltip_id).one()

    if not internaltip:
        log.err("File associated to a non existent Internaltip!")
        raise errors.TipIdNotFound

    internaltip.update_date = datetime_now()

    new_file = InternalFile()
    new_file.name = uploaded_file['filename']
    new_file.content_type = uploaded_file['content_type']
    new_file.size = uploaded_file['body_len']
    new_file.internaltip_id = internaltip_id
    new_file.submission = uploaded_file['submission']
    new_file.file_path = uploaded_file['encrypted_path']

    store.add(new_file)

    log.debug("=> Recorded new InternalFile %s" % uploaded_file['filename'])

    return serialize_file(new_file)
Example #37
def login_whistleblower(store, receipt, client_using_tor):
    """
    login_whistleblower returns the WhistleblowerTip.id
    """
    hashed_receipt = security.hash_password(
        receipt, GLSettings.memory_copy.private.receipt_salt)
    wbtip = store.find(
        WhistleblowerTip,
        WhistleblowerTip.receipt_hash == unicode(hashed_receipt)).one()

    if not wbtip:
        log.debug("Whistleblower login: Invalid receipt")
        GLSettings.failed_login_attempts += 1
        raise errors.InvalidAuthentication

    if not client_using_tor and not GLSettings.memory_copy.accept_tor2web_access[
            'whistleblower']:
        log.err("Denied login request over clear Web for role 'whistleblower'")
        raise errors.TorNetworkRequired

    log.debug("Whistleblower login: Valid receipt")
    wbtip.last_access = datetime_now()
    return wbtip.id
Example #38
    def operation(self):
        # ------- BEGIN Anomalies section -------
        anomalies_to_save = get_anomalies()
        if anomalies_to_save:
            save_anomalies(anomalies_to_save)
            log.debug("Stored %d anomalies collected during the last hour",
                      len(anomalies_to_save))

        # ------- END Anomalies section ---------

        # ------- BEGIN Stats section -----------
        current_time = datetime_now()
        statistic_summary = get_statistics()
        if statistic_summary:
            save_statistics(GLSettings.stats_collection_start_time,
                            current_time, statistic_summary)
            log.debug("Stored statistics %s collected from %s to %s",
                      statistic_summary,
                      GLSettings.stats_collection_start_time, current_time)
        # ------- END Stats section -------------

        # Hourly Resets
        GLSettings.reset_hourly()
Example #39
    def __init__(self, net_sockets, proxy_ip, proxy_port):
        log.info("Starting process monitor")

        self.shutting_down = False

        self.start_time = datetime_now()
        self.tls_process_pool = []
        self.cpu_count = multiprocessing.cpu_count()

        self.worker_path = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'worker_https.py')

        self.tls_cfg = {
            'proxy_ip': proxy_ip,
            'proxy_port': proxy_port,
            'debug': log.loglevel <= logging.DEBUG,
            'site_cfgs': [],
        }

        if not net_sockets:
            log.err("No ports to bind to! Spawning processes will not work!")

        self.tls_cfg['tls_socket_fds'] = [ns.fileno() for ns in net_sockets]
Example #40
def login_receiver(store, receiver_id, password):
    """
    This receiver login also needs to track the number of unsuccessful
    consecutive logins, because this counter may lead to a password lockdown.

    login_receivers returns a tuple (receiver_id, state, pcn)
    """
    receiver = store.find(Receiver, (Receiver.id == receiver_id)).one()

    if not receiver or \
            receiver.user.role != u'receiver' or \
            receiver.user.state == u'disabled' or \
            not security.check_password(password,
                                        receiver.user.password,
                                        receiver.user.salt):
        log.debug("Receiver login: Invalid credentials")
        return False, None, None

    log.debug("Receiver login: Authorized receiver")
    receiver.user.last_login = utility.datetime_now()
    store.commit()  # the transact was read only! on success we apply the commit()
    return receiver.id, receiver.user.state, receiver.user.password_change_needed
Example #41
def db_admin_update_user(session, state, tid, user_id, request, language):
    """
    Updates the specified user.
    """
    fill_localized_keys(request, models.User.localized_keys, language)

    user = models.db_get(session, models.User, models.User.tid == tid,
                         models.User.id == user_id)

    user.update(request)

    password = request['password']
    if password:
        user.password = security.hash_password(password, user.salt)
        user.password_change_date = datetime_now()

    # The various options related to managing PGP keys are handled here.
    parse_pgp_options(state, user, request)

    if user.role == 'admin':
        db_refresh_memory_variables(session, [tid])

    return user
Example #42
def create_comment(session, tid, user_id, user_key, rtip_id, content):
    rtip, itip = db_access_rtip(session, tid, user_id, rtip_id)

    itip.update_date = rtip.last_access = datetime_now()

    comment = models.Comment()
    comment.internaltip_id = itip.id
    comment.type = u'receiver'
    comment.author_id = rtip.receiver_id

    if itip.crypto_tip_pub_key:
        comment.content = base64.b64encode(
            GCE.asymmetric_encrypt(itip.crypto_tip_pub_key, content)).decode()
    else:
        comment.content = content

    session.add(comment)
    session.flush()

    ret = serialize_comment(session, comment)
    ret['content'] = content

    return ret
Example #43
    def handle_file_upload(self, token_id):
        token = TokenList.get(token_id)

        log.debug("file upload with token associated: %s" % token)

        uploaded_file = self.get_file_upload()
        if uploaded_file is None:
            return

        uploaded_file['body'].avoid_delete()
        uploaded_file['body'].close()

        try:
            # dump_file_fs returns the new filepath inside the dictionary
            uploaded_file = yield threads.deferToThread(dump_file_fs, uploaded_file)
            uploaded_file['creation_date'] = datetime_now()

            token.associate_file(uploaded_file)

            serialize_memory_file(uploaded_file)
        except Exception as excep:
            log.err("Unable to save file in filesystem: %s" % excep)
            raise errors.InternalServerError("Unable to accept files")
Example #44
def register_file_db(store, uploaded_file, internaltip_id):
    """
    Reminder: this is used only with fileApp - a Tip appends a new file;
    for the submission section, we rely on the Token to keep track of the
    associated file, and InternalFile(s) are created in
    handlers/submission.py.

    :param uploaded_file: contain this struct of data:
        {
          'body': <closed file u'/home/qq/Dev/GlobaLeaks/backend/workingdir/files/encrypted_upload/lryZO8IlldGg3BS3.aes', mode 'w+b' at 0xb5b68498>,
          'body_len': 667237,
          'content_type': 'image/png',
          'encrypted_path': u'/home/XYZ/Dev/GlobaLeaks/backend/workingdir/files/submission/lryZO8IlldGg3BS3.aes',
          'filename': 'SteganographyIsImportant.png'
        }
    """
    internaltip = store.find(InternalTip,
                             InternalTip.id == internaltip_id).one()

    if not internaltip:
        log.err("File associated to a non existent Internaltip!")
        raise errors.TipIdNotFound

    internaltip.update_date = datetime_now()

    new_file = InternalFile()
    new_file.name = uploaded_file['filename']
    new_file.content_type = uploaded_file['content_type']
    new_file.size = uploaded_file['body_len']
    new_file.internaltip_id = internaltip_id
    new_file.file_path = uploaded_file['encrypted_path']

    store.add(new_file)

    log.debug("=> Recorded new InternalFile %s" % uploaded_file['filename'])

    return serialize_file(new_file)
Example #45
    def post(self):
        """
        Request: Unknown
        Response: Unknown
        Errors: TipIdNotFound
        """
        itip_id = yield get_itip_id_by_wbtip_id(self.current_user.user_id)

        uploaded_file = self.get_file_upload()
        if uploaded_file is None:
            return

        uploaded_file['body'].avoid_delete()
        uploaded_file['body'].close()

        try:
            # First: dump the file in the filesystem
            dst = os.path.join(GLSettings.submission_path,
                               os.path.basename(uploaded_file['path']))

            directory_traversal_check(GLSettings.submission_path, dst)

            uploaded_file = yield threads.deferToThread(
                write_upload_encrypted_to_disk, uploaded_file, dst)
        except Exception as excep:
            log.err("Unable to save a file in filesystem: %s" % excep)
            raise errors.InternalServerError("Unable to accept new files")

        uploaded_file['date'] = datetime_now()
        uploaded_file['submission'] = False

        try:
            # Second: register the file in the database
            yield register_ifile_on_db(uploaded_file, itip_id)
        except Exception as excep:
            log.err("Unable to register (append) file in DB: %s" % excep)
            raise errors.InternalServerError("Unable to accept new files")
Example #46
def db_user_update_user(store, user_id, request, language):
    """
    Updates the specified user.
    This version of the function is specific to users who, compared with
    admins, can change only a few things:
      - preferred language
      - preferred timezone
      - the password (with old password check)
      - pgp key
    raises :class:`globaleaks.errors.UserIdNotFound` if the user does not exist.
    """
    user = models.User.get(store, user_id)

    if not user:
        raise errors.UserIdNotFound

    user.language = request.get('language',
                                GLSettings.memory_copy.default_language)
    user.timezone = request.get('timezone',
                                GLSettings.memory_copy.default_timezone)

    new_password = request['password']
    old_password = request['old_password']

    if len(new_password) and len(old_password):
        user.password = change_password(user.password, old_password,
                                        new_password, user.salt)

        if user.password_change_needed:
            user.password_change_needed = False

        user.password_change_date = datetime_now()

    # The various options related to managing PGP keys are handled here.
    parse_pgp_options(user, request)

    return user
Example #47
def register_wbfile_on_db(store, uploaded_file, receivertip_id):
    rtip = store.find(ReceiverTip, ReceiverTip.id == receivertip_id).one()

    if not rtip:
        log.err("Cannot associate a file to a not existent receivertip!")
        raise errors.TipIdNotFound

    rtip.internaltip.update_date = rtip.last_access = datetime_now()

    new_file = WhistleblowerFile()

    new_file.name = uploaded_file['name']
    new_file.description = uploaded_file['description']

    new_file.content_type = uploaded_file['type']
    new_file.size = uploaded_file['size']
    new_file.receivertip_id = rtip.id
    new_file.file_path = uploaded_file['path']

    store.add(new_file)

    log.debug("=> Recorded new WhistleblowerFile %s" % uploaded_file['name'])

    return serializers.serialize_wbfile(new_file)
Example #48
def login(store, username, password, using_tor2web):
    """
    login returns a tuple (user_id, state, pcn)
    """
    user = store.find(
        User, And(User.username == username, User.state != u'disabled')).one()

    if not user or not security.check_password(password, user.salt,
                                               user.password):
        log.debug("Login: Invalid credentials")
        GLSettings.failed_login_attempts += 1
        raise errors.InvalidAuthentication

    if using_tor2web and not GLSettings.memory_copy.accept_tor2web_access[
            user.role]:
        log.err("Denied login request on Tor2web for role '%s'" % user.role)
        raise errors.TorNetworkRequired
    else:
        log.debug("Accepted login request on Tor2web for role '%s'" %
                  user.role)

    log.debug("Login: Success (%s)" % user.role)
    user.last_login = datetime_now()
    return user.id, user.state, user.role, user.password_change_needed
Example #49
    def fill_random_field_recursively(self, answers, field):
        field_type = field['type']

        if field_type == 'checkbox':
            value = {}
            for option in field['options']:
                value[option['id']] = 'True'
        elif field_type == 'selectbox':
            value = {'value': field['options'][0]['id']}
        elif field_type == 'date':
            value = {'value': datetime_to_ISO8601(datetime_now())}
        elif field_type == 'tos':
            value = {'value': 'True'}
        elif field_type == 'fieldgroup':
            value = {}
            for child in field['children']:
                self.fill_random_field_recursively(value, child)
        else:
            value = {
                'value':
                text_type(''.join(six.unichr(x) for x in range(0x400, 0x4FF)))
            }

        answers[field['id']] = [value]
Example #50
def set_user_password(tid, user, password, cc):
    # Regenerate the password hash only if different from the best choice on the platform
    if user.hash_alg != 'ARGON2':
        user.hash_alg = 'ARGON2'
        user.salt = GCE.generate_salt()

    password_hash = GCE.hash_password(password, user.salt)

    # Check that the new password is different from the current password
    if user.password == password_hash:
        raise errors.PasswordReuseError

    user.password = password_hash
    user.password_change_date = datetime_now()

    State.log(tid=tid, type='change_password', user_id=user.id, object_id=user.id)

    if not State.tenant_cache[tid].encryption and cc == '':
        return None

    enc_key = GCE.derive_key(password.encode(), user.salt)
    if not cc:
        # The first password change triggers the generation
        # of the user encryption private key and its backup
        cc, user.crypto_pub_key = GCE.generate_keypair()
        user.crypto_bkp_key, user.crypto_rec_key = GCE.generate_recovery_key(cc)

    user.crypto_prv_key = Base64Encoder.encode(GCE.symmetric_encrypt(enc_key, cc))

    if State.tenant_cache[1].crypto_escrow_pub_key:
        user.crypto_escrow_bkp1_key = Base64Encoder.encode(GCE.asymmetric_encrypt(State.tenant_cache[1].crypto_escrow_pub_key, cc))

    if State.tenant_cache[tid].crypto_escrow_pub_key:
        user.crypto_escrow_bkp2_key = Base64Encoder.encode(GCE.asymmetric_encrypt(State.tenant_cache[tid].crypto_escrow_pub_key, cc))

    return cc
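
set_user_password above re-hashes the password with a per-user salt, refuses the change when the new hash equals the stored one, and records the change date. Below is a rough stand-in for that flow using only the standard library; hashlib.pbkdf2_hmac replaces the GCE Argon2 helpers here purely for illustration:

import hashlib
import os
from datetime import datetime

def hash_password(password, salt):
    return hashlib.pbkdf2_hmac('sha256', password.encode(), salt, 100000).hex()

def set_password(user, password):
    # 'user' is a plain dict standing in for the ORM object
    user.setdefault('salt', os.urandom(16))
    new_hash = hash_password(password, user['salt'])
    if user.get('password') == new_hash:
        raise ValueError('password reuse is not allowed')
    user['password'] = new_hash
    user['password_change_date'] = datetime.utcnow()

user = {}
set_password(user, 'correct horse battery staple')
print(sorted(user))  # ['password', 'password_change_date', 'salt']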
Example #51
    def post(self, token_id):
        """
        Parameter: internaltip_id
        Request: Unknown
        Response: Unknown
        Errors: TokenFailure
        """
        token = TokenList.get(token_id)

        log.debug("file upload with token associated: %s" % token)

        uploaded_file = self.get_file_upload()
        if uploaded_file is None:
            return

        uploaded_file['body'].avoid_delete()
        uploaded_file['body'].close()

        try:
            dst = os.path.join(GLSettings.submission_path,
                               os.path.basename(uploaded_file['path']))

            directory_traversal_check(GLSettings.submission_path, dst)

            uploaded_file = yield threads.deferToThread(
                write_upload_encrypted_to_disk, uploaded_file, dst)
            uploaded_file['date'] = datetime_now()
            uploaded_file['submission'] = True

            token.associate_file(uploaded_file)

        except Exception as excep:
            log.err("Unable to save file in filesystem: %s" % excep)
            raise errors.InternalServerError("Unable to accept files")

        self.set_status(201)  # Created
Example #52
    def db_check_for_expiring_submissions(self, session, tid):
        threshold = datetime_now() + timedelta(hours=self.state.tenant_cache[tid].notification.tip_expiration_threshold)

        result = session.query(models.User, func.count(models.InternalTip.id), func.min(models.InternalTip.expiration_date)) \
                        .filter(models.InternalTip.tid == tid,
                                models.ReceiverTip.internaltip_id == models.InternalTip.id,
                                models.InternalTip.expiration_date < threshold,
                                models.User.id == models.ReceiverTip.receiver_id) \
                        .group_by(models.User.id) \
                        .having(func.count(models.InternalTip.id) > 0) \
                        .all()

        for user, expiring_submission_count, earliest_expiration_date in result:

            user_desc = user_serialize_user(session, user, user.language)

            data = {
                'type': 'tip_expiration_summary',
                'node': db_admin_serialize_node(session, tid, user.language),
                'notification': db_get_notification(session, tid, user.language),
                'user': user_desc,
                'expiring_submission_count': expiring_submission_count,
                'earliest_expiration_date': earliest_expiration_date
            }

            subject, body = Templating().get_mail_subject_and_body(data)

            session.add(models.Mail({
                'tid': tid,
                'address': user_desc['mail_address'],
                'subject': subject,
                'body': body
            }))
Example #53
    def __init__(self):
        self.current_time = datetime_now()

        self.difficulty_dict = {'human_captcha': False, 'graph_captcha': False}
Example #54
    def __init__(self):
        HourlyJob.__init__(self)
        self.stats_collection_start_time = datetime_now()
Example #55
    def ExpirationWatch(self):
        missing_time = ISO8601_to_datetime(
            self.data['tip']['expiration_date']) - datetime_now()
        missing_hours = int(divmod(missing_time.total_seconds(), 3600)[0])
        return str(missing_hours)
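For instance, with an illustrative expiration date 2 days and 5 hours in the future:

    # missing_time.total_seconds() == 190800.0
    # divmod(190800.0, 3600)[0] == 53.0, so ExpirationWatch() returns '53'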
Example #56
    def test_is_expired(self):
        self.assertFalse(utility.is_expired(None))
        self.assertTrue(utility.is_expired(utility.datetime_null()))
        self.assertTrue(utility.is_expired(utility.datetime_now()))
        self.assertFalse(utility.is_expired(utility.utc_future_date(seconds=1337)))
Example #57
def db_create_submission(store, request, uploaded_files, client_using_tor,
                         language):
    answers = request['answers']

    context = store.find(models.Context,
                         models.Context.id == request['context_id']).one()
    if not context:
        raise errors.ContextIdNotFound

    submission = models.InternalTip()

    submission.progressive = db_assign_submission_progressive(store)

    if context.tip_timetolive > -1:
        submission.expiration_date = get_expiration(context.tip_timetolive)
    else:
        submission.expiration_date = datetime_never()

    # The total score is taken from the client because that is the only approach
    # compatible with the end-to-end submission model; the score is just an
    # indicator, not critical information, so we accept that a malicious user
    # could forge it.
    submission.total_score = request['total_score']

    # The tor2web flag keeps track of the transport security level used by the whistleblower
    submission.tor2web = not client_using_tor

    submission.context_id = context.id

    submission.enable_two_way_comments = context.enable_two_way_comments
    submission.enable_two_way_messages = context.enable_two_way_messages
    submission.enable_attachments = context.enable_attachments
    submission.enable_whistleblower_identity = context.questionnaire.enable_whistleblower_identity

    if submission.enable_whistleblower_identity and request[
            'identity_provided']:
        submission.identity_provided = True
        submission.identity_provided_date = datetime_now()

    try:
        questionnaire = db_get_context_steps(store, context.id, None)
        questionnaire_hash = unicode(sha256(json.dumps(questionnaire)))

        submission.questionnaire_hash = questionnaire_hash
        submission.preview = extract_answers_preview(questionnaire, answers)

        store.add(submission)

        db_archive_questionnaire_schema(store, questionnaire,
                                        questionnaire_hash)

        db_save_questionnaire_answers(store, submission.id, answers)
    except Exception as excep:
        log.err("Submission create: fields validation fail: %s" % excep)
        raise excep

    try:
        for filedesc in uploaded_files:
            new_file = models.InternalFile()
            new_file.name = filedesc['name']
            new_file.description = ""
            new_file.content_type = filedesc['type']
            new_file.size = filedesc['size']
            new_file.internaltip_id = submission.id
            new_file.submission = filedesc['submission']
            new_file.file_path = filedesc['path']
            store.add(new_file)
            log.debug("=> file associated %s|%s (%d bytes)" %
                      (new_file.name, new_file.content_type, new_file.size))
    except Exception as excep:
        log.err("Submission create: unable to create db entry for files: %s" %
                excep)
        raise excep

    receipt, wbtip = db_create_whistleblowertip(store, submission)

    if submission.context.maximum_selectable_receivers > 0 and \
            len(request['receivers']) > submission.context.maximum_selectable_receivers:
        raise errors.SubmissionValidationFailure(
            "selected an invalid number of recipients")

    rtips = []
    for receiver in store.find(models.Receiver,
                               In(models.Receiver.id, request['receivers'])):
        if submission.context not in receiver.contexts:
            continue

        if not GLSettings.memory_copy.allow_unencrypted and len(
                receiver.user.pgp_key_public) == 0:
            continue

        rtips.append(db_create_receivertip(store, receiver, submission))

    if len(rtips) == 0:
        raise errors.SubmissionValidationFailure("need at least one recipient")

    log.debug("The finalized submission had created %d models.ReceiverTip(s)" %
              len(rtips))

    submission_dict = serialize_usertip(store, wbtip, language)

    submission_dict.update({'receipt': receipt})

    return submission_dict
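A hypothetical minimal request for this function (the keys mirror exactly the ones read above; the real wire format carries more fields):

    request = {
        'context_id': context_id,
        'answers': {},              # questionnaire answers keyed by field id
        'total_score': 0,
        'identity_provided': False,
        'receivers': [receiver_id],
    }
    result = db_create_submission(store, request, uploaded_files=[],
                                  client_using_tor=True, language=u'en')
    receipt = result['receipt']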
Example #58
    def set_itips_near_to_expire(self, session):
        date = datetime_now() + timedelta(hours=self.state.tenant_cache[1].notification.tip_expiration_threshold - 1)
        session.query(models.InternalTip).update({'expiration_date': date})
Example #59
def set_reset_token(session, user_id, validation_token):
    user = models.db_get(session, models.User, models.User.id == user_id)
    user.change_email_date = datetime_now()
    user.reset_password_token = validation_token
    session.commit()
Example #60
def update_AnomalyQ(event_matrix, alarm_level):
    GLSettings.RecentAnomaliesQ.update(
        {datetime_now(): [event_matrix, alarm_level]})