Пример #1
0
    def ping_mail_flush(self, notification_settings, receivers_synthesis):
        """
        Send a "ping" digest email to every receiver in receivers_synthesis.

        :param notification_settings: dict holding the 'ping_mail_template'
            and 'ping_mail_title' templates used to render the email.
        :param receivers_synthesis: dict mapping an id to a tuple
            (receiver_dict, winks), where winks is the number of new
            pieces of information the receiver is pinged about.
        :return: a DeferredList firing once every sendmail has completed.

        TODO This function should be implemented as a clean and testable plugin in the
        way defined in plugin/base.py and plugin/notification.py, and/or is the opportunity
        to review these classes, at the moment is a simplified version that just create a
        ping email and send it via sendmail.
        """
        # local import so this fix does not touch the module header
        from twisted.internet.defer import DeferredList

        sends = []
        for _, data in receivers_synthesis.iteritems():
            receiver_dict, winks = data

            receiver_name = receiver_dict['name']
            receiver_email = receiver_dict['ping_mail_address']

            fakeevent = OD()
            fakeevent.type = u'ping_mail'
            fakeevent.node_info = None
            fakeevent.context_info = None
            fakeevent.receiver_info = receiver_dict
            fakeevent.tip_info = None
            fakeevent.subevent_info = {'counter': winks}

            body = Templating().format_template(
                notification_settings['ping_mail_template'], fakeevent)
            title = Templating().format_template(
                notification_settings['ping_mail_title'], fakeevent)

            # in devel mode the developer name is used as sender name, so
            # test emails are immediately recognizable
            source_mail_name = GLSettings.developer_name if GLSettings.devel_mode \
                else GLSettings.memory_copy.notif_source_name

            message = MIME_mail_build(source_mail_name,
                                      GLSettings.memory_copy.notif_source_email,
                                      receiver_name,
                                      receiver_email,
                                      title,
                                      body)

            fakeevent2 = OD()
            fakeevent2.type = "Ping mail for %s (%d info)" % (receiver_email, winks)

            # BUGFIX: the original code returned from inside the loop, so only
            # the first receiver ever got its ping mail; now every send is
            # scheduled and the caller receives a DeferredList over all of them.
            sends.append(
                sendmail(authentication_username=GLSettings.memory_copy.notif_username,
                         authentication_password=GLSettings.memory_copy.notif_password,
                         from_address=GLSettings.memory_copy.notif_source_email,
                         to_address=[receiver_email],
                         message_file=message,
                         smtp_host=GLSettings.memory_copy.notif_server,
                         smtp_port=GLSettings.memory_copy.notif_port,
                         security=GLSettings.memory_copy.notif_security,
                         event=fakeevent2))

        return DeferredList(sends)
Пример #2
0
    def send_pgp_alerts(self, node_desc, receiver_desc, notification_settings):
        """
        Mail a PGP expiration alert to a single receiver, rendered from the
        'pgp_alert_mail_title' / 'pgp_alert_mail_template' templates.
        """
        alert_event = OD(type=u'pgp_expiration_alert',
                         node_info=node_desc,
                         context_info=None,
                         steps_info=None,
                         receiver_info=receiver_desc,
                         tip_info=None,
                         subevent_info=None)

        body = Templating().format_template(
            notification_settings['pgp_alert_mail_template'], alert_event)
        title = Templating().format_template(
            notification_settings['pgp_alert_mail_title'], alert_event)

        # the receiver is both the displayed recipient and the envelope address
        to_address = receiver_desc['mail_address']
        message = MIME_mail_build(GLSetting.memory_copy.notif_source_name,
                                  GLSetting.memory_copy.notif_source_email,
                                  to_address,
                                  to_address,
                                  title,
                                  body)

        yield sendmail(authentication_username=GLSetting.memory_copy.notif_username,
                       authentication_password=GLSetting.memory_copy.notif_password,
                       from_address=GLSetting.memory_copy.notif_source_email,
                       to_address=to_address,
                       message_file=message,
                       smtp_host=GLSetting.memory_copy.notif_server,
                       smtp_port=GLSetting.memory_copy.notif_port,
                       security=GLSetting.memory_copy.notif_security,
                       event=None)
Пример #3
0
def load_complete_events(store, event_number=GLSetting.notification_limit):
    """
    _complete_ is explicit because do not serialize, but make an OD() of the
    description.

    :param store: the Storm store used for the queries.
    :param event_number: maximum amount of events that can be returned by the
        function; events to be notified are taken in account later.
    :return: a list of OD() event descriptions, newest first.
    """
    node_desc = db_admin_serialize_node(store, GLSetting.defaults.language)

    # the Notification row is invariant during the scan: fetch it once
    # instead of re-querying it for every event (as the old code did)
    notification_row = store.find(Notification).one()

    # maps an event kind to the receiver preference that can suppress it
    kind_to_pref = {
        'File': 'file_notification',
        'Message': 'message_notification',
        'Comment': 'comment_notification',
        'Tip': 'tip_notification',
    }

    event_list = []
    storedevnts = store.find(EventLogs, EventLogs.mail_sent == False)
    storedevnts.order_by(Desc(EventLogs.creation_date))

    for i, stev in enumerate(storedevnts):
        if len(event_list) == event_number:
            log.debug(
                "Maximum number of notification event reach (Mailflush) %d, after %d"
                % (event_number, i))
            break

        # skip the event when the receiver disabled notification for its kind
        pref = kind_to_pref.get(stev.event_reference['kind'])
        if pref and not stev.description['receiver_info'][pref]:
            continue

        eventcomplete = OD()

        # node level information are not stored in the node, but fetch now;
        # the serialization is rendered in the receiver's own language
        eventcomplete.notification_settings = admin_serialize_notification(
            notification_row,
            stev.description['receiver_info']['language'])

        eventcomplete.node_info = node_desc

        # event level information are decoded from DB in the old 'Event' namedtuple format:
        eventcomplete.receiver_info = stev.description['receiver_info']
        eventcomplete.tip_info = stev.description['tip_info']
        eventcomplete.subevent_info = stev.description['subevent_info']
        eventcomplete.context_info = stev.description['context_info']
        eventcomplete.steps_info = stev.description['steps_info']

        eventcomplete.type = stev.description['type']  # 'Tip', 'Comment'
        eventcomplete.trigger = stev.event_reference[
            'kind']  # 'plaintext_blah' ...

        eventcomplete.storm_id = stev.id

        event_list.append(eventcomplete)

    return event_list
Пример #4
0
def load_complete_events(store, events_limit=GLSettings.notification_limit):
    """
    This function do not serialize, but make an OD() of the description.

    :param store: the Storm store used for the queries.
    :param events_limit: the amount of events that can be returned by the
        function; events to be notified are taken in account later.
    :return: a list of OD() event descriptions.
    """
    node_desc = db_admin_serialize_node(store, GLSettings.defaults.language)

    # the Notification row is invariant during the scan: fetch it once
    # instead of re-querying it for every event (as the old code did)
    notification_row = store.find(Notification).one()

    event_list = []
    totaleventinqueue = store.find(EventLogs, EventLogs.mail_sent == False).count()
    # protection limit: never walk more than three times the requested amount
    storedevnts = store.find(EventLogs, EventLogs.mail_sent == False)[:events_limit * 3]

    debug_event_counter = {}
    for i, stev in enumerate(storedevnts):
        if len(event_list) == events_limit:
            log.debug("Maximum number of notification event reach (Mailflush) %d, after %d" %
                      (events_limit, i))
            break

        debug_event_counter.setdefault(stev.event_reference['kind'], 0)
        debug_event_counter[stev.event_reference['kind']] += 1

        # receivers that opted out of tip notification are skipped entirely
        if not stev.description['receiver_info']['tip_notification']:
            continue

        eventcomplete = OD()

        # node level information are not stored in the node, but fetch now;
        # rendered in the receiver's own language
        eventcomplete.notification_settings = admin_serialize_notification(
            notification_row, stev.description['receiver_info']['language']
        )

        eventcomplete.node_info = node_desc

        # event level information are decoded from DB in the old 'Event' namedtuple format:
        eventcomplete.receiver_info = stev.description['receiver_info']
        eventcomplete.tip_info = stev.description['tip_info']
        eventcomplete.subevent_info = stev.description['subevent_info']
        eventcomplete.context_info = stev.description['context_info']

        eventcomplete.type = stev.description['type'] # 'Tip', 'Comment'
        eventcomplete.trigger = stev.event_reference['kind'] # 'blah' ...

        eventcomplete.orm_id = stev.id

        event_list.append(eventcomplete)

    if debug_event_counter:
        if totaleventinqueue > (events_limit * 3):
            log.debug("load_complete_events: %s from %d Events" %
                      (debug_event_counter, totaleventinqueue ))
        else:
            log.debug("load_complete_events: %s from %d Events, with a protection limit of %d" %
                      (debug_event_counter, totaleventinqueue, events_limit * 3 ))

    return event_list
Пример #5
0
    def generate_anomaly_email(self, plausible_event):
        """
        Derive a synthetic 'receiver_notification_limit_reached' event,
        reusing the notification/node/receiver data of *plausible_event*.
        """
        return OD(type=u'receiver_notification_limit_reached',
                  notification_settings=plausible_event.notification_settings,
                  node_info=plausible_event.node_info,
                  context_info=None,
                  receiver_info=plausible_event.receiver_info,
                  tip_info=None,
                  subevent_info=None,
                  orm_id=0)
    def test_session_management_sched(self):
        """
        Verify that the session management schedule drops every expired
        session: three already-expired sessions must all be reaped.
        """
        expired_session = OD(
            refreshdate=datetime_null(),  # new but expired session!
            id="admin",
            role="admin",
            user_id="admin")

        for session_id in ('111', '222', '333'):
            GLSetting.sessions[session_id] = expired_session

        yield session_management_sched.SessionManagementSchedule().operation()

        self.assertEqual(len(GLSetting.sessions), 0)
Пример #7
0
    def send_pgp_alerts(self, receiver_desc):
        """
        Render and send the PGP expiration alert mail for one receiver,
        using the receiver's own language for node data and templates.
        """
        language = receiver_desc['language']
        node_desc = yield admin_serialize_node(language)
        notification_settings = yield get_notification(language)

        alert_event = OD(type=u'pgp_expiration_alert',
                         node_info=node_desc,
                         context_info=None,
                         receiver_info=receiver_desc,
                         tip_info=None,
                         subevent_info=None)

        subject = Templating().format_template(
            notification_settings['pgp_alert_mail_title'], alert_event)
        body = Templating().format_template(
            notification_settings['pgp_alert_mail_template'], alert_event)

        yield sendmail(receiver_desc['mail_address'], subject, body)
Пример #8
0
    def send_admin_pgp_alerts(self, admin_desc, expired_or_expiring):
        """
        Send an admin PGP expiration alert (list of expired/expiring keys)
        to every admin user, localized via *admin_desc*'s language.
        """
        language = admin_desc['language']
        node_desc = yield admin_serialize_node(language)
        notification_settings = yield get_notification(language)

        alert_event = OD(type=u'admin_pgp_expiration_alert',
                         node_info=node_desc,
                         context_info=None,
                         receiver_info=None,
                         tip_info=None,
                         subevent_info={'expired_or_expiring': expired_or_expiring})

        subject = Templating().format_template(
            notification_settings['admin_pgp_alert_mail_title'], alert_event)
        body = Templating().format_template(
            notification_settings['admin_pgp_alert_mail_template'], alert_event)

        # the very same rendered mail goes to every admin address
        for admin_user in (yield get_admin_users()):
            yield sendmail(admin_user['mail_address'], subject, body)
Пример #9
0
    def __init__(self):
        """
        Initialize every GLSettings default: command line parsing helpers,
        network binding, filesystem paths, scheduler timings, crypto
        parameters and the in-memory copy of the DB-backed configuration.

        Raises:
            Exception: when the singleton is instantiated twice.
        """
        # NOTE(review): the class is referenced here as 'GLSettingssClass'
        # (double 's') while the error message says 'GLSettingsClass' —
        # confirm which spelling matches the actual class name.
        if GLSettingssClass.initialized:
            error_msg = "Singleton GLSettingsClass instanced twice!"
            raise Exception(error_msg)
        else:
            GLSettingssClass.initialized = True

        # command line parsing utils
        self.parser = OptionParser()
        self.cmdline_options = None

        # version
        self.version_string = __version__

        # daemon
        self.nodaemon = False

        # threads sizes
        self.db_thread_pool_size = 1

        self.bind_addresses = '127.0.0.1'

        # bind port
        self.bind_port = 8082

        # store name
        self.store_name = 'main_store'

        self.db_type = 'sqlite'
        # Database version tracking
        self.db_version = DATABASE_VERSION

        # debug defaults
        self.storm_debug = False
        self.http_log = -1
        self.http_requests_counter = 0
        self.loglevel = "CRITICAL"

        # files and paths
        self.root_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..'))
        self.pid_path = '/var/run/globaleaks'
        self.working_path = '/var/globaleaks'
        self.static_source = '/usr/share/globaleaks/glbackend'
        self.glclient_path = '/usr/share/globaleaks/glclient'

        self.set_ramdisk_path()

        # list of plugins available in the software
        self.notification_plugins = [
            'MailNotification',
        ]

        self.default_password = '******'

        # some singleton classes: sessions and some event queues
        self.sessions = {}
        self.RecentEventQ = []
        self.RecentAnomaliesQ = {}

        # statistical, referred to latest period
        # and reset by the session_management sched
        self.failed_login_attempts = 0

        # download token tracking
        self.download_tokens = dict()

        # static file rules
        self.staticfile_regexp = r'(.*)'
        self.staticfile_overwrite = False
        self.reserved_names = OD()
        self.reserved_names.logo = "globaleaks_logo"
        self.reserved_names.css = "custom_stylesheet"
        self.reserved_names.html = "custom_homepage"

        # acceptable 'Host:' header in HTTP request
        self.accepted_hosts = "127.0.0.1,localhost"

        self.receipt_regexp = u'[0-9]{16}'

        # default timings (seconds) for scheduled jobs
        self.session_management_delta = 60
        self.notification_delta = 120
        self.delivery_delta = 20
        self.anomaly_delta = 10
        self.mailflush_delta = 300

        # Default values, used to initialize DB at the first start,
        # or whenever the value is not supplied by the client.
        # These values are then stored in the single instance
        # (Node, Receiver or Context) and then can be updated by
        # the admin using the Admin interface (advanced settings)
        self.defaults = OD()

        # default tor2web_admin setting is set to True;
        # the setting is then switched based on automatic user detection during wizard:
        #
        # - if the admin performs the wizard via tor2web the permission is kept True
        # - if the admin performs the wizard via Tor the permission is set to False
        self.defaults.tor2web_admin = True

        self.defaults.tor2web_submission = False
        self.defaults.tor2web_receiver = False
        self.defaults.tor2web_unauth = True
        self.defaults.allow_unencrypted = False
        self.defaults.allow_iframes_inclusion = False
        self.defaults.maximum_namesize = 128
        self.defaults.maximum_textsize = 4096
        self.defaults.maximum_filesize = 30  # expressed in megabytes
        self.defaults.maximum_requestsize = 4  # expressed in megabytes
        self.defaults.exception_email = u"*****@*****.**"

        self.defaults.submission_minimum_delay = 10
        self.defaults.submission_maximum_ttl = 10800

        # This value get copy in Context(s): 15 days expressed in seconds
        self.defaults.tip_seconds_of_life = (3600 * 24) * 15

        self.defaults.language = u'en'
        self.defaults.languages_enabled = LANGUAGES_SUPPORTED_CODES

        self.defaults.timezone = 0
        self.defaults.landing_page = 'homepage'

        self.defaults.disable_mail_notification = True
        self.defaults.notif_server = None
        self.defaults.notif_port = None
        self.defaults.notif_username = None
        self.defaults.notif_security = None
        self.defaults.notif_uses_tor = None
        self.defaults.notif_hours_before_expiration = 72

        # this becomes False when only a few MBs of disk are left and
        # the node disables submissions (see minimum_megabytes_required)
        self.defaults.accept_submissions = True
        self.defaults.minimum_megabytes_required = 1024  # 1 GB, or the node is disabled

        # a dict to keep track of the lifetime of the session. at the moment
        # not exported in the UI.
        # https://github.com/globaleaks/GlobaLeaks/issues/510
        self.defaults.lifetimes = {
            'admin': (60 * 60),
            'receiver': (60 * 60),
            'wb': (60 * 60)
        }

        # A lot of operations performed massively by globaleaks
        # should avoid to fetch continuously variables from the DB so that
        # it is important to keep this variables in memory
        #
        # To this aim a variable memory_copy is instantiated as a copy of
        # self.defaults and then initialized and updated after
        # create_tables() and for every node+notif update
        self.memory_copy = OD(self.defaults)

        # Default delay threshold
        self.delay_threshold = 0.800

        # unchecked_tor_input contains information that cannot be validated now
        # due to complex inclusions or requirements. Data is used in
        # globaleaks.db.datainit.apply_cli_options()
        self.unchecked_tor_input = {}

        # SOCKS default
        self.socks_host = "127.0.0.1"
        self.socks_port = 9050

        self.notification_limit = 30

        self.user = getpass.getuser()
        self.group = getpass.getuser()
        self.uid = os.getuid()
        self.gid = os.getgid()
        self.start_clean = False
        self.devel_mode = False
        self.developer_name = ''
        self.skip_wizard = False
        self.glc_path = None

        # Number of failed login enough to generate an alarm
        self.failed_login_alarm = 5

        # Number of minutes in which a user is prevented to login in case of triggered alarm
        self.failed_login_block_time = 5

        # Alarm to be ignored: can be raise with the -A command line switch
        self.disk_alarm_threshold = 0

        # Size in bytes of every log file. Once this size is reached the
        # logfile is rotated.
        # Default: 1M
        self.log_file_size = 1000000
        # Number of log files to conserve.
        self.maximum_rotated_log_files = 100

        # size used while streaming files
        self.file_chunk_size = 8192

        # Disk file encryption in realtime.
        # This key permits GlobaLeaks to resist an application restart,
        # not a reboot! (it is kept in GLSettings only).
        # The key is initialized and stored in the key path;
        # key_id contains an identifier of the key (when the system
        # reboots, the key changes).
        ### you can read more about this security measure in the document:
        ### https://github.com/globaleaks/GlobaLeaks/wiki/Encryption
        self.AES_key_size = 32
        # This key_id is just to identify the keys, and is generated with
        self.AES_key_id_regexp = u'[A-Za-z0-9]{16}'
        # 16 bytes (integer division under Python 2)
        self.AES_counter_nonce = 128 / 8
        self.AES_file_regexp = r'(.*)\.aes'
        self.AES_file_regexp_comp = re.compile(self.AES_file_regexp)
        self.AES_keyfile_prefix = "aeskey-"

        self.exceptions = {}
        self.exceptions_email_count = 0
        self.exceptions_email_hourly_limit = 20

        # Extreme debug options triggered by --XXX, these are the defaults
        self.debug_option_in_the_future = 0
        self.debug_option_UUID_human = ""
        self.debug_UUID_human_counter = 0
        self.debug_option_mlockall = False

        self.disable_mail_torification = False
        self.disable_mail_notification = False
        self.disable_backend_exception_notification = False
        self.disable_client_exception_notification = False
Пример #10
0
    def __init__(self):
        """
        Initialize every GLSettings default: command line parsing helpers,
        ORM thread pool, network binding, filesystem paths, crypto
        parameters and the in-memory copy of the DB-backed configuration.
        Also starts the ORM thread pool and registers its shutdown hook.
        """
        # command line parsing utils
        self.parser = OptionParser()
        self.cmdline_options = None

        # version
        self.version_string = __version__

        # testing
        # This variable makes it possible to hook/bypass code when unit-tests are run
        self.testing = False

        # daemon
        self.nodaemon = False

        # thread pool with a single worker thread (min 0, max 1)
        self.orm_tp = ThreadPool(0, 1)

        self.bind_addresses = '127.0.0.1'

        # bind port
        self.bind_port = 8082

        # store name
        self.store_name = 'main_store'

        self.db_type = 'sqlite'

        # debug defaults
        self.orm_debug = False
        self.log_requests_responses = -1
        self.requests_counter = 0
        self.loglevel = "CRITICAL"

        # files and paths
        self.root_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
        self.pid_path = '/var/run/globaleaks'
        self.working_path = '/var/globaleaks'
        self.static_source = '/usr/share/globaleaks/data'

        # pick the first client path that actually exists on disk
        self.client_path = '/usr/share/globaleaks/client'
        for path in possible_client_paths:
            if os.path.exists(path):
                self.client_path = path
                break

        self.set_ramdisk_path()

        self.default_password = '******'

        # some singleton classes: sessions and some event queues
        self.authentication_lifetime = 3600
        self.sessions = TempDict(timeout=self.authentication_lifetime)
        self.RecentEventQ = []
        self.RecentAnomaliesQ = {}

        self.accept_submissions = True

        # statistical, referred to latest period
        # and reset by the session_management sched
        self.failed_login_attempts = 0

        # static file rules
        self.staticfile_regexp = r'(.*)'
        self.staticfile_overwrite = False

        self.reserved_names = OD({
          'logo': 'logo',
          'css': 'custom_stylesheet',
          'html': 'custom_homepage'
        })

        # acceptable 'Host:' header in HTTP request
        self.accepted_hosts = "127.0.0.1, localhost"
        self.configured_hosts = []

        self.receipt_regexp = u'[0-9]{16}'

        # A lot of operations performed massively by globaleaks
        # should avoid to fetch continuously variables from the DB so that
        # it is important to keep this variables in memory
        #
        # The following initialization is needed only for variables that need
        # to be used in the startup queries, after that memory_copy is
        # initialized with the content Node table.
        self.memory_copy = OD({
            'maximum_namesize': 128,
            'maximum_textsize': 4096,
            'maximum_filesize': 30,
            'allow_iframes_inclusion': False,
            'tor2web_access': {
                'admin': True,
                'whistleblower': False,
                'custodian': False,
                'receiver': False,
                'unauth': True
            }
        })

        # Default request time uniform value
        self.side_channels_guard = 0.150

        # unchecked_tor_input contains information that cannot be validated now
        # due to complex inclusions or requirements. Data is used in
        # globaleaks.db.appdata.apply_cli_options()
        self.unchecked_tor_input = {}

        # SOCKS default
        self.socks_host = "127.0.0.1"
        self.socks_port = 9050

        self.notification_limit = 30
        self.jobs_operation_limit = 20

        self.user = getpass.getuser()
        self.group = getpass.getuser()
        self.uid = os.getuid()
        self.gid = os.getgid()
        self.start_clean = False
        self.devel_mode = False
        self.developer_name = ''
        self.skip_wizard = False
        self.log_timing_stats = False

        # Number of failed login enough to generate an alarm
        self.failed_login_alarm = 5

        # Number of minutes in which a user is prevented to login in case of triggered alarm
        self.failed_login_block_time = 5

        # Alarm to be ignored: can be raise with the -A command line switch
        self.disk_alarm_threshold = 0

        # Size in bytes of every log file. Once this size is reached the
        # logfile is rotated.
        # Default: 1M
        self.log_file_size = 1000000
        # Number of log files to conserve.
        self.maximum_rotated_log_files = 100

        # size used while streaming files
        self.file_chunk_size = 8192

        # AES parameters for on-disk file encryption
        self.AES_key_size = 32
        self.AES_key_id_regexp = u'[A-Za-z0-9]{16}'
        # 16 bytes (integer division under Python 2)
        self.AES_counter_nonce = 128 / 8
        self.AES_file_regexp = r'(.*)\.aes'
        self.AES_file_regexp_comp = re.compile(self.AES_file_regexp)
        self.AES_keyfile_prefix = "aeskey-"

        self.exceptions = {}
        self.exceptions_email_count = 0
        self.exceptions_email_hourly_limit = 20

        # Extreme debug options triggered by --XXX, these are the defaults
        self.debug_option_in_the_future = 0
        self.debug_option_UUID_human = ""
        self.debug_UUID_human_counter = 0
        self.debug_option_mlockall = False

        self.disable_mail_torification = False
        self.disable_mail_notification = False
        self.disable_backend_exception_notification = False
        self.disable_client_exception_notification = False

        self.enable_input_length_checks = True

        self.mail_counters = {}
        self.mail_timeout = 15 # seconds
        self.mail_attempts_limit = 3 # per mail limit

        # start the ORM thread pool and make sure it is stopped at shutdown
        reactor.addSystemEventTrigger('after', 'shutdown', self.orm_tp.stop)
        self.orm_tp.start()
Пример #11
0
    def request(self,
                jbody=None,
                role=None,
                user_id=None,
                headers=None,
                body='',
                remote_ip='0.0.0.0',
                method='MOCK',
                kwargs=None):
        """
        Function useful for performing mock requests.

        Args:

            jbody:
                The body of the request as a dict (it will be automatically
                converted to string)

            body:
                The body of the request as a string

            role:
                If we should perform authentication role can be either "admin",
                "receiver" or "wb"

            user_id:
                If when performing authentication the session should be bound
                to a certain user_id.

            method:
                HTTP method, e.g. "GET" or "POST"

            headers:
                (dict or :class:`cyclone.httputil.HTTPHeaders` instance) HTTP
                headers to pass on the request

            remote_ip:
                If a particular remote_ip should be set.

            kwargs:
                Extra keyword arguments forwarded to the handler constructor.

        """
        # BUGFIX: the original signature used a mutable default ({}) that was
        # shared across every call; normalize the None sentinel here instead.
        if kwargs is None:
            kwargs = {}

        if jbody and not body:
            body = json.dumps(jbody)
        elif body and jbody:
            raise ValueError('jbody and body in conflict')

        application = Application([])

        tr = proto_helpers.StringTransport()
        connection = httpserver.HTTPConnection()
        connection.factory = application
        connection.makeConnection(tr)

        request = httpserver.HTTPRequest(uri='mock',
                                         method=method,
                                         headers=headers,
                                         body=body,
                                         remote_ip=remote_ip,
                                         connection=connection)

        handler = self._handler(application, request, **kwargs)

        # BUGFIX: accept any calling convention — the stub is assigned to the
        # instance (unbound), so it may be invoked with zero arguments; the
        # old (cls, *args) signature would have raised TypeError in that case.
        def mock_pass(*args):
            pass

        # so that we don't complain about XSRF
        handler.check_xsrf_cookie = mock_pass

        if role:
            session_id = '4tehlulz'
            new_session = OD(refreshdate=datetime_now(),
                             id=session_id,
                             role=role,
                             user_id=user_id)
            GLSetting.sessions[session_id] = new_session
            handler.request.headers['X-Session'] = session_id
        return handler
Пример #12
0
    def __init__(self):
        """
        Initialize every GLSettings default: command line parsing helpers,
        ORM thread pool, network binding, filesystem paths, log rotation
        limits, crypto parameters and the in-memory copy of the DB-backed
        configuration. Also starts the ORM thread pool and registers its
        shutdown hook.
        """
        # command line parsing utils
        self.parser = OptionParser()
        self.cmdline_options = None

        # version
        self.version_string = __version__

        # testing
        # This variable makes it possible to hook/bypass code when unit-tests are run
        self.testing = False

        # daemon
        self.nodaemon = False

        # thread pool with a single worker thread (min 0, max 1)
        self.orm_tp = ThreadPool(0, 1)

        self.bind_addresses = '127.0.0.1'

        # bind port
        self.bind_port = 8082

        # store name
        self.store_name = 'main_store'

        self.db_type = 'sqlite'
        self.initialize_db = True

        # debug defaults
        self.orm_debug = False
        self.log_requests_responses = -1
        self.requests_counter = 0
        self.loglevel = "CRITICAL"

        # files and paths
        self.root_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..'))
        self.pid_path = '/var/run/globaleaks'
        self.working_path = '/var/globaleaks'

        # pick the first client path that actually exists on disk
        self.client_path = '/usr/share/globaleaks/client'
        for path in possible_client_paths:
            if os.path.exists(path):
                self.client_path = path
                break

        self.set_ramdisk_path()

        self.authentication_lifetime = 3600
        self.RecentEventQ = []
        self.RecentAnomaliesQ = {}

        self.accept_submissions = True

        # statistical, referred to latest period
        # and reset by the session_management sched
        self.failed_login_attempts = 0

        # static file rules
        self.staticfile_regexp = r'(.*)'
        self.staticfile_overwrite = False

        # acceptable 'Host:' header in HTTP request
        self.accepted_hosts = "127.0.0.1, localhost"
        self.tor_address = None

        self.receipt_regexp = u'[0-9]{16}'

        # A lot of operations performed massively by globaleaks
        # should avoid to fetch continuously variables from the DB so that
        # it is important to keep this variables in memory
        #
        # Initialization is handled by db_refresh_memory_variables
        self.memory_copy = OD({
            'maximum_namesize': 128,
            'maximum_textsize': 4096,
            'maximum_filesize': 30,
            'allow_iframes_inclusion': False,
            'accept_tor2web_access': {
                'admin': True,
                'whistleblower': False,
                'custodian': False,
                'receiver': False,
                'unauth': True,
            },
        })

        # Default request time uniform value
        self.side_channels_guard = 0.150

        # SOCKS default
        self.socks_host = "127.0.0.1"
        self.socks_port = 9050

        self.notification_limit = 30
        self.jobs_operation_limit = 20

        self.user = getpass.getuser()
        self.group = getpass.getuser()
        self.uid = os.getuid()
        self.gid = os.getgid()
        self.start_clean = False
        self.devel_mode = False
        self.developer_name = ''
        self.disable_swap = False
        self.skip_wizard = False
        self.log_timing_stats = False

        # Number of failed login enough to generate an alarm
        self.failed_login_alarm = 5

        # Number of minutes in which a user is prevented to login in case of triggered alarm
        self.failed_login_block_time = 5

        # Alarm to be ignored: can be raise with the -A command line switch
        self.disk_alarm_threshold = 0

        # Limit for log sizes and number of log files
        # https://github.com/globaleaks/GlobaLeaks/issues/1578
        self.log_size = 10000000  # 10MB
        self.log_file_size = 1000000  # 1MB
        # NOTE(review): '/' is integer division under Python 2 (10 files);
        # under Python 3 it would yield a float — confirm intended runtime.
        self.num_log_files = self.log_size / self.log_file_size

        # size used while streaming files
        self.file_chunk_size = 1000000  # 1MB

        # AES parameters for on-disk file encryption
        self.AES_key_size = 32
        self.AES_key_id_regexp = u'[A-Za-z0-9]{16}'
        # 16 bytes (integer division under Python 2)
        self.AES_counter_nonce = 128 / 8
        self.AES_file_regexp = r'(.*)\.aes'
        self.AES_file_regexp_comp = re.compile(self.AES_file_regexp)
        self.AES_keyfile_prefix = "aeskey-"

        self.exceptions = {}
        self.exceptions_email_count = 0
        self.exceptions_email_hourly_limit = 20

        self.disable_mail_torification = False
        self.disable_mail_notification = False
        self.disable_backend_exception_notification = False
        self.disable_client_exception_notification = False

        self.enable_input_length_checks = True

        self.mail_counters = {}
        self.mail_timeout = 15  # seconds
        self.mail_attempts_limit = 3  # per mail limit

        # start the ORM thread pool and make sure it is stopped at shutdown
        reactor.addSystemEventTrigger('after', 'shutdown', self.orm_tp.stop)
        self.orm_tp.start()
Пример #13
0
# -*- encoding: utf-8 -*-
#
# :authors: Arturo Filastò
# :licence: see LICENSE

import sys
import os
import logging

from twisted.python import log as txlog
from twisted.python.logfile import DailyLogFile

from cyclone.util import ObjectDict as OD

# Module-level runtime configuration shared by the herder modules.
config = OD()
# Verbose/debug logging is on by default.
config.debug = True

# XXX make this a config option
log_file = "/tmp/bridgeherder.log"

# Split the absolute log path into directory and basename, since
# DailyLogFile takes (name, directory) as separate arguments.
# NOTE(review): this split assumes log_file is an absolute path
# starting with '/' — TODO confirm before making it configurable.
log_folder = os.path.join('/', *log_file.split('/')[:-1])
log_filename = log_file.split('/')[-1]
# Rotating log file: Twisted opens a new file each day.
daily_logfile = DailyLogFile(log_filename, log_folder)


class LoggerFactory(object):
    def __init__(self, options):
        """Create the factory; *options* are accepted but currently unused."""
        # The startup options are taken for interface compatibility only;
        # nothing is configured until start() is invoked.
        pass

    def start(self, application):
Пример #14
0
    def __init__(self):
        """Initialize the settings singleton with compiled-in defaults.

        All attributes assigned here are the process-wide configuration
        surface; most are later overridden from the command line or the
        database. Raises Exception if the singleton is instantiated twice.
        """
        # NOTE(review): the class is referenced as "GLSettingssClass"
        # (double 's') while the error message says "GLSettingsClass" —
        # confirm which spelling the actual class declaration uses.
        if GLSettingssClass.initialized:
            error_msg = "Singleton GLSettingsClass instanced twice!"
            raise Exception(error_msg)
        else:
            GLSettingssClass.initialized = True

        # command line parsing utils
        self.parser = OptionParser()
        self.cmdline_options = None

        # version
        self.version_string = __version__

        # testing
        # This flag lets code hook/bypass behavior when unit-tests are run
        self.testing = False

        # daemon: when True, do not fork into the background
        self.nodaemon = False

        # ORM thread pool: presumably min 0 / max 1 threads, so DB access
        # is serialized on a single thread — TODO confirm ThreadPool args
        self.orm_tp = ThreadPool(0, 1)

        self.bind_addresses = '127.0.0.1'

        # bind port
        self.bind_port = 8082

        # store name
        self.store_name = 'main_store'

        self.db_type = 'sqlite'

        # debug defaults
        self.orm_debug = False
        # -1 presumably means request/response logging disabled — confirm
        self.log_requests_responses = -1
        self.requests_counter = 0
        self.loglevel = "CRITICAL"

        # files and paths
        self.root_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..'))
        self.pid_path = '/var/run/globaleaks'
        self.working_path = '/var/globaleaks'

        self.static_source = '/usr/share/globaleaks/backend'

        # Pick the first client path that exists on disk; fall back to
        # the packaged default if none of the candidates is present.
        self.client_path = '/usr/share/globaleaks/client'
        for path in possible_client_paths:
            if os.path.exists(path):
                self.client_path = path
                break

        self.set_ramdisk_path()

        self.default_password = '******'

        # some singleton collections: sessions and some event queues
        self.sessions = {}
        self.RecentEventQ = []
        self.RecentAnomaliesQ = {}

        self.accept_submissions = True

        # statistical counter, referred to the latest period
        # and reset by the session_management scheduler
        self.failed_login_attempts = 0

        # download token tracking
        self.download_tokens = dict()

        # static file rules
        self.staticfile_regexp = r'(.*)'
        self.staticfile_overwrite = False

        # logical names reserved for customizable static assets
        self.reserved_names = OD({
            'logo': 'globaleaks_logo',
            'css': 'custom_stylesheet',
            'html': 'custom_homepage'
        })

        # acceptable 'Host:' header in HTTP request
        self.accepted_hosts = "127.0.0.1, localhost"
        self.configured_hosts = []

        self.receipt_regexp = u'[0-9]{16}'

        # default timings for scheduled jobs (units presumably seconds —
        # TODO confirm against the scheduler that consumes them)
        self.session_management_delta = 60
        self.notification_delta = 60
        self.delivery_delta = 20
        self.anomaly_delta = 10
        self.mailflush_delta = 300
        self.secure_file_delete_delta = 3600

        # Default values, used to initialize the DB at the first start,
        # or whenever the value is not supplied by the client.
        # These values are then stored in the single instance
        # (Node, Receiver or Context) and then can be updated by
        # the admin using the Admin interface (advanced settings)
        self.defaults = OD()

        self.defaults.tor2web_access = {}

        # default tor2web_admin setting is set to True;
        # the setting is then switched based on automatic user detection during wizard:
        #
        # - if the admin performs the wizard via tor2web the permission is kept True
        # - if the admin performs the wizard via Tor the permission is set to False
        self.defaults.tor2web_access = {
            'admin': True,
            'whistleblower': False,
            'custodian': False,
            'receiver': False,
            'unauth': True
        }

        self.defaults.timezone = 0

        # session lifetime: one hour, in seconds
        self.defaults.authentication_lifetime = 60 * 60

        self.defaults.maximum_namesize = 128
        self.defaults.maximum_textsize = 4096
        self.defaults.maximum_filesize = 30

        # A lot of operations performed massively by globaleaks
        # should avoid fetching variables from the DB continuously, so
        # it is important to keep these variables in memory
        #
        # To this aim a variable memory_copy is instantiated as a copy of
        # self.defaults and then initialized and updated after
        # create_tables() and for every node+notif update
        self.memory_copy = OD(self.defaults)

        # Default request time uniform value (seconds) used to pad
        # responses against timing side channels
        self.side_channels_guard = 0.150

        # unchecked_tor_input contains information that cannot be validated now
        # due to complex inclusions or requirements. Data is used in
        # globaleaks.db.appdata.apply_cli_options()
        self.unchecked_tor_input = {}

        # SOCKS default (local Tor daemon)
        self.socks_host = "127.0.0.1"
        self.socks_port = 9050

        self.notification_limit = 30
        self.jobs_operation_limit = 20

        # run as the invoking user/group by default
        self.user = getpass.getuser()
        self.group = getpass.getuser()
        self.uid = os.getuid()
        self.gid = os.getgid()
        self.start_clean = False
        self.devel_mode = False
        self.developer_name = ''
        self.skip_wizard = False
        self.log_timing_stats = False

        # Number of failed logins sufficient to generate an alarm
        self.failed_login_alarm = 5

        # Number of minutes in which a user is prevented to login in case of triggered alarm
        self.failed_login_block_time = 5

        # Alarm to be ignored: can be raised via the -A command line switch
        self.disk_alarm_threshold = 0

        # Size in bytes of every log file. Once this size is reached the
        # logfile is rotated.
        # Default: 1M
        self.log_file_size = 1000000
        # Number of rotated log files to conserve.
        self.maximum_rotated_log_files = 100

        # size used while streaming files
        self.file_chunk_size = 8192

        # Disk file encryption in realtime
        # if the key is fine or is not.
        # this key permits Globaleaks to survive an application restart
        # but not a reboot! (it is kept in GLSettings.)
        # key is initialized and stored in key path.
        # key_id contains an identifier of the key (when the system reboots,
        # the key changes).
        ### you can read more about this security measure in the document:
        ### https://github.com/globaleaks/GlobaLeaks/wiki/Encryption
        self.AES_key_size = 32
        # This key_id is just to identify the keys, and is generated with
        # the regexp below; 128-bit nonce expressed in bytes (Python 2
        # integer division yields 16 — under Python 3 this would be 16.0)
        self.AES_key_id_regexp = u'[A-Za-z0-9]{16}'
        self.AES_counter_nonce = 128 / 8
        self.AES_file_regexp = r'(.*)\.aes'
        self.AES_file_regexp_comp = re.compile(self.AES_file_regexp)
        self.AES_keyfile_prefix = "aeskey-"

        # exception-notification bookkeeping (rate-limited per hour)
        self.exceptions = {}
        self.exceptions_email_count = 0
        self.exceptions_email_hourly_limit = 20

        # Extreme debug options triggered by --XXX; these are the defaults
        self.debug_option_in_the_future = 0
        self.debug_option_UUID_human = ""
        self.debug_UUID_human_counter = 0
        self.debug_option_mlockall = False

        self.disable_mail_torification = False
        self.disable_mail_notification = False
        self.disable_backend_exception_notification = False
        self.disable_client_exception_notification = False

        self.enable_input_length_checks = True

        self.mail_counters = {}
        self.mail_timeout = 15  # seconds
        self.mail_attempts_limit = 3  # per mail limit

        # stop the ORM thread pool cleanly on reactor shutdown, then start it
        reactor.addSystemEventTrigger('after', 'shutdown', self.orm_tp.stop)
        self.orm_tp.start()
Пример #15
0
    def __init__(self):

        if GLSettingsClass.initialized:
            error_msg = "Singleton GLSettingClass instanced twice!"
            raise Exception(error_msg)
        else:
            GLSettingsClass.initialized = True

        # command line parsing utils
        self.parser = OptionParser()
        self.cmdline_options = None

        # version
        self.version_string = __version__

        # daemon
        self.nodaemon = False

        # threads sizes
        self.db_thread_pool_size = 1

        self.bind_addresses = '127.0.0.1'

        # bind port
        self.bind_port = 8082

        # store name
        self.store_name = 'main_store'

        # Database variables for MYSQL
        self.db_username = '******'
        self.db_password = '******'
        self.db_hostname = 'localhost'
        # Can either be sqlite or mysql
        self.db_type = 'sqlite'
        # Database version tracking
        self.db_version = DATABASE_VERSION

        # debug defaults
        self.storm_debug = False
        self.http_log = -1
        self.http_log_counter = 0
        self.loglevel = "CRITICAL"

        # files and paths
        self.root_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..'))
        self.pid_path = '/var/run/globaleaks'
        self.working_path = '/var/globaleaks'
        self.static_source = '/usr/share/globaleaks/glbackend'
        self.glclient_path = '/usr/share/globaleaks/glclient'
        self.ramdisk_path = '/dev/shm/globaleaks'
        if not os.path.isdir(self.ramdisk_path):
            self.ramdisk_path = tempfile.mkdtemp()

        # list of plugins available in the software
        self.notification_plugins = [
            'MailNotification',
        ]

        # session tracking, in the singleton classes
        self.sessions = dict()
        self.failed_login_attempts = 0  # statisticals, referred to latest_period
        # and resetted by session_management sched

        # download tocken trackin
        self.download_tokens = dict()

        # static file rules
        self.staticfile_regexp = r'(.*)'
        self.staticfile_overwrite = False
        self.images_extensions = (".jpg", ".jpeg", ".png", ".gif")
        self.css_extensions = ".css"
        self.reserved_names = OD()
        self.reserved_names.logo = "globaleaks_logo"
        self.reserved_names.css = "custom_stylesheet"

        # acceptable 'Host:' header in HTTP request
        self.accepted_hosts = "127.0.0.1,localhost"

        # default timings for scheduled jobs
        self.session_management_minutes_delta = 1  # runner.py function expects minutes
        self.cleaning_hours_delta = 6  # runner.py function expects hours
        self.notification_minutes_delta = 2  # runner.py function expects minutes
        self.delivery_seconds_delta = 20  # runner.py function expects seconds
        self.anomaly_seconds_delta = 30  # runner.py function expects seconds
        self.stats_minutes_delta = 10  # runner.py function expects minutes
        self.pgp_check_hours_delta = 24  # runner.py function expects hours

        self.www_form_urlencoded_maximum_size = 1024

        self.defaults = OD()
        # Default values, used to initialize DB at the first start,
        # or whenever the value is not supply by client.
        # These value are then stored in the single instance
        # (Node, Receiver or Context) and then can be updated by
        # the admin using the Admin interface (advanced settings)
        self.defaults.allow_unencrypted = False
        self.defaults.tor2web_admin = False
        self.defaults.tor2web_submission = False
        self.defaults.tor2web_receiver = False
        self.defaults.tor2web_unauth = True
        self.defaults.anomaly_checks = False
        self.defaults.maximum_namesize = 128
        self.defaults.maximum_textsize = 4096
        self.defaults.maximum_filesize = 30  # expressed in megabytes
        self.defaults.exception_email = u"*****@*****.**"
        # Context dependent values:
        self.defaults.receipt_regexp = u'[0-9]{16}'
        self.defaults.tip_seconds_of_life = (3600 * 24) * 15
        self.defaults.submission_seconds_of_life = (3600 * 24) * 3
        self.defaults.languages_enabled = ['en']

        self.memory_copy = OD()
        # Some operation, like check for maximum file, can't access
        # to the DB every time. So when some Node values are updated
        # here are copied, in order to permit a faster comparison
        self.memory_copy.maximum_filesize = self.defaults.maximum_filesize
        self.memory_copy.maximum_textsize = self.defaults.maximum_textsize
        self.memory_copy.maximum_namesize = self.defaults.maximum_namesize
        self.memory_copy.allow_unencrypted = self.defaults.allow_unencrypted
        self.memory_copy.tor2web_admin = self.defaults.tor2web_admin
        self.memory_copy.tor2web_submission = self.defaults.tor2web_submission
        self.memory_copy.tor2web_receiver = self.defaults.tor2web_receiver
        self.memory_copy.tor2web_unauth = self.defaults.tor2web_unauth
        self.memory_copy.anomaly_checks = self.defaults.anomaly_checks
        self.memory_copy.exception_email = self.defaults.exception_email
        # updated by globaleaks/db/__init__.import_memory_variables
        self.memory_copy.default_language = 'en'
        self.memory_copy.notif_server = None
        self.memory_copy.notif_port = None
        self.memory_copy.notif_username = None
        self.memory_copy.notif_security = None
        # import_memory_variables is called after create_tables and node+notif updating

        self.anomalies_counter = dict(external_counted_events)
        # this dict keep track of some 'external' events and is
        # cleaned periodically (10 minutes in stats)
        self.anomalies_list = []
        # this is the collection of the messages shall be reported to the admin
        self.anomalies_messages = []
        # maximum amount of element riported by /admin/anomalies and /admin/stats
        self.anomalies_report_limit = 20

        # Default delay threshold
        self.delay_threshold = 0.800

        # a dict to keep track of the lifetime of the session. at the moment
        # not exported in the UI.
        # https://github.com/globaleaks/GlobaLeaks/issues/510
        self.defaults.lifetimes = {
            'admin': (60 * 60),
            'receiver': (60 * 60),
            'wb': (60 * 60)
        }

        # unchecked_tor_input contains information that cannot be validated now
        # due to complex inclusions or requirements. Data is used in
        # globaleaks.db.datainit.apply_cli_options()
        self.unchecked_tor_input = {}

        # SOCKS default
        self.socks_host = "127.0.0.1"
        self.socks_port = 9050
        self.tor_socks_enable = True

        # https://github.com/globaleaks/GlobaLeaks/issues/647
        # we've struck a notification settings in a server, due to an
        # error looping thru email. A temporary way to disable mail
        # is put here. A globaleaks restart cause the email to restart.
        self.notification_temporary_disable = False
        self.notification_limit = 30

        self.user = getpass.getuser()
        self.group = getpass.getuser()
        self.uid = os.getuid()
        self.gid = os.getgid()
        self.start_clean = False
        self.devel_mode = False
        self.skip_wizard = False
        self.glc_path = None

        # Number of failed login enough to generate an alarm
        self.failed_login_alarm = 5

        # Number of minutes in which a user is prevented to login in case of triggered alarm
        self.failed_login_block_time = 5

        # Size in bytes of every log file. Once this size is reached the
        # logfile is rotated.
        # Default: 1M
        self.log_file_size = 1000000
        # Number of log files to conserve.
        self.maximum_rotated_log_files = 100

        # Disk file encryption in realtime
        # if the key is fine or is not.
        # this key permit Globaleaks to resist on application restart
        # not to a reboot! (is written in GLSetting.
        # key is initialized and stored in key path.
        # key_id contains an identifier of the key (when system reboots,
        # key changes.
        ### you can read more about this security measure in the document:
        ### https://github.com/globaleaks/GlobaLeaks/wiki/Encryption
        self.AES_key_size = 32
        # This key_id is just to identify the keys, and is generated with
        self.AES_key_id_regexp = u'[A-Za-z0-9]{16}'
        self.AES_counter_nonce = 128 / 8
        self.AES_file_regexp = r'(.*)\.aes'
        self.AES_file_regexp_comp = re.compile(self.AES_file_regexp)
        self.AES_keyfile_prefix = "aeskey-"

        self.exceptions = {}

        # Extreme debug option triggered by --XXX, that's are the defaults
        self.debug_option_in_the_future = 0
        self.debug_option_UUID_human = ""
        self.debug_UUID_human_counter = 0
        self.debug_option_mlockall = False