def process_mail_creation(self, store, data):
        # https://github.com/globaleaks/GlobaLeaks/issues/798
        # TODO: the current solution is global and configurable only by the admin
        receiver_id = data['receiver']['id']
        sent_emails = GLSettings.get_mail_counter(receiver_id)
        if sent_emails >= GLSettings.memory_copy.notification_threshold_per_hour:
            log.debug(
                "Discarding emails for receiver %s due to threshold already exceeded for the current hour"
                % receiver_id)
            return

        GLSettings.increment_mail_counter(receiver_id)
        if sent_emails >= GLSettings.memory_copy.notification_threshold_per_hour:
            log.info(
                "Reached threshold of %d emails with limit of %d for receiver %s"
                % (sent_emails,
                   GLSettings.memory_copy.notification_threshold_per_hour,
                   receiver_id))

            # simply changing the notification type causes the
            # notification_limit_reached notification to be sent instead
            data['type'] = u'receiver_notification_limit_reached'

        data['notification'] = db_get_notification(
            store, data['receiver']['language'])
        data['node'] = db_admin_serialize_node(store,
                                               data['receiver']['language'])

        if not data['node']['allow_unencrypted'] and data['receiver'][
                'pgp_key_status'] != u'enabled':
            return

        subject, body = Templating().get_mail_subject_and_body(data)

        # If the receiver has encryption enabled encrypt the mail body
        if data['receiver']['pgp_key_status'] == u'enabled':
            gpob = GLBPGP()

            try:
                gpob.load_key(data['receiver']['pgp_key_public'])
                body = gpob.encrypt_message(
                    data['receiver']['pgp_key_fingerprint'], body)
            except Exception as excep:
                log.err(
                    "Error in PGP interface object (for %s: %s)! (notification+encryption)"
                    % (data['receiver']['username'], str(excep)))

                return
            finally:
                # the finally clause always runs, even when the
                # except block returns or raises
                gpob.destroy_environment()

        mail = models.Mail({
            'address': data['receiver']['mail_address'],
            'subject': subject,
            'body': body
        })

        store.add(mail)
Example #2
    def migrate_Context(self):
        old_objs = self.store_old.find(self.model_from['Context'])
        for old_obj in old_objs:
            new_obj = self.model_to['Context']()
            for _, v in new_obj._storm_columns.iteritems():
                if v.name == 'tip_timetolive':
                    # NOTE: hardcoded policy
                    tip_ttl = 5*365
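                    # assumption: tip_timetolive is expressed in days, so this caps retention at five years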
                    if old_obj.tip_timetolive > tip_ttl:
                        GLSettings.print_msg('[WARNING] Found an expiration date longer than 5 years! Configuring tips to never expire.')
                        # If data retention was larger than 5 years the intended goal was
                        # probably to keep the submission around forever.
                        new_obj.tip_timetolive = -1
                    elif old_obj.tip_timetolive < -1:
                        GLSettings.print_msg('[WARNING] Found a negative tip expiration! Configuring tips to never expire.')
                        new_obj.tip_timetolive = -1
                    else:
                        new_obj.tip_timetolive = old_obj.tip_timetolive
                    continue

                elif v.name == 'enable_rc_to_wb_files':
                    new_obj.enable_rc_to_wb_files = False
                    continue

                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)
Example #3
    def _deferred_start(self):
        ret = update_db()

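        # assumption based on the branches below: update_db() returns -1 on a
        # failed migration and 0 when a fresh database must be initialized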
        if ret == -1:
            reactor.stop()

        if ret == 0:
            init_db()

        sync_clean_untracked_files()
        sync_refresh_memory_variables()

        GLSettings.orm_tp.start()

        reactor.addSystemEventTrigger('before', 'shutdown', self.shutdown)

        arw = APIResourceWrapper()

        GLSettings.api_factory = Site(arw, logFormatter=timedLogFormatter)

        for sock in GLSettings.http_socks:
            listen_tcp_on_sock(reactor, sock.fileno(), GLSettings.api_factory)

        GLSettings.appstate.process_supervisor = ProcessSupervisor(
            GLSettings.https_socks, '127.0.0.1', 8082)

        GLSettings.appstate.process_supervisor.maybe_launch_https_workers()

        GLSettings.start_jobs()

        GLSettings.print_listening_interfaces()
Example #4
    def _test(self, path, version):
        f = 'glbackend-%d.db' % version

        helpers.init_glsettings_for_unit_tests()
        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        self.start_db_file = os.path.abspath(
            os.path.join(GLSettings.db_path, 'glbackend-%d.db' % version))
        self.final_db_file = os.path.abspath(
            os.path.join(GLSettings.db_path,
                         'glbackend-%d.db' % DATABASE_VERSION))
        self.start_db_uri = GLSettings.make_db_uri(self.start_db_file)
        GLSettings.db_uri = GLSettings.make_db_uri(self.final_db_file)

        shutil.rmtree(GLSettings.db_path, True)
        os.mkdir(GLSettings.db_path)
        dbpath = os.path.join(path, f)
        dbfile = os.path.join(GLSettings.db_path, f)
        shutil.copyfile(dbpath, dbfile)

        # TESTS PRECONDITIONS
        preconditions = getattr(self, 'preconditions_%d' % version, None)
        if preconditions is not None:
            preconditions()

        ret = update_db()

        # TESTS POSTCONDITIONS
        postconditions = getattr(self, 'postconditions_%d' % version, None)
        if postconditions is not None:
            postconditions()

        shutil.rmtree(GLSettings.db_path)
        self.assertNotEqual(ret, -1)
Example #5
    def process_mail_creation(self, store, data):
        receiver_id = data['receiver']['id']

        # Do not spool emails if the receiver has opted out of notifications for this tip.
        if not data['tip']['enable_notifications']:
            log.debug("Discarding emails for %s due to receiver's preference." % receiver_id)
            return

        # https://github.com/globaleaks/GlobaLeaks/issues/798
        # TODO: the current solution is global and configurable only by the admin
        sent_emails = GLSettings.get_mail_counter(receiver_id)
        if sent_emails >= GLSettings.memory_copy.notification_threshold_per_hour:
            log.debug("Discarding emails for receiver %s due to threshold already exceeded for the current hour" %
                      receiver_id)
            return

        GLSettings.increment_mail_counter(receiver_id)
        if sent_emails >= GLSettings.memory_copy.notification_threshold_per_hour:
            log.info("Reached threshold of %d emails with limit of %d for receiver %s" % (
                     sent_emails,
                     GLSettings.memory_copy.notification_threshold_per_hour,
                     receiver_id)
            )

            # simply changing the notification type causes the
            # notification_limit_reached notification to be sent instead
            data['type'] = u'receiver_notification_limit_reached'

        data['notification'] = db_get_notification(store, data['receiver']['language'])
        data['node'] = db_admin_serialize_node(store, data['receiver']['language'])

        if not data['node']['allow_unencrypted'] and data['receiver']['pgp_key_status'] != u'enabled':
            return

        subject, body = Templating().get_mail_subject_and_body(data)

        # If the receiver has encryption enabled encrypt the mail body
        if data['receiver']['pgp_key_status'] == u'enabled':
            gpob = GLBPGP()

            try:
                gpob.load_key(data['receiver']['pgp_key_public'])
                body = gpob.encrypt_message(data['receiver']['pgp_key_fingerprint'], body)
            except Exception as excep:
                log.err("Error in PGP interface object (for %s: %s)! (notification+encryption)" %
                        (data['receiver']['username'], str(excep)))

                return
            finally:
                # the finally clause always runs, even when the
                # except block returns or raises
                gpob.destroy_environment()

        mail = models.Mail({
            'address': data['receiver']['mail_address'],
            'subject': subject,
            'body': body
        })

        store.add(mail)
Example #6
    def migrate_Context(self):
        old_objs = self.store_old.find(self.model_from['Context'])
        for old_obj in old_objs:
            new_obj = self.model_to['Context']()
            for _, v in new_obj._storm_columns.items():
                if v.name == 'tip_timetolive':
                    # NOTE: hardcoded policy
                    tip_ttl = 5 * 365
                    if old_obj.tip_timetolive > tip_ttl:
                        GLSettings.print_msg(
                            '[WARNING] Found an expiration date longer than 5 years! Configuring tips to never expire.'
                        )
                        # If data retention was larger than 5 years the intended goal was
                        # probably to keep the submission around forever.
                        new_obj.tip_timetolive = -1
                    elif old_obj.tip_timetolive < -1:
                        GLSettings.print_msg(
                            '[WARNING] Found a negative tip expiration! Configuring tips to never expire.'
                        )
                        new_obj.tip_timetolive = -1
                    else:
                        new_obj.tip_timetolive = old_obj.tip_timetolive
                    continue

                elif v.name == 'enable_rc_to_wb_files':
                    new_obj.enable_rc_to_wb_files = False
                    continue

                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)
Example #7
    def execute_query(self, query):
        try:
            self.store_new.execute(query + ';')
        except OperationalError as excep:
            GLSettings.print_msg(
                'OperationalError %s while executing query: %s' %
                (excep, query))
            raise excep
Example #8
    def __init__(self, migration_mapping, start_version, store_old, store_new):
        self.appdata = load_appdata()

        self.migration_mapping = migration_mapping
        self.start_version = start_version

        self.store_old = store_old
        self.store_new = store_new

        self.model_from = {}
        self.model_to = {}
        self.entries_count = {}
        self.fail_on_count_mismatch = {}

        for model_name, model_history in migration_mapping.iteritems():
            length = DATABASE_VERSION + 1 - FIRST_DATABASE_VERSION_SUPPORTED
            if len(model_history) != length:
                raise TypeError(
                    'Expecting a table with {} statuses ({})'.format(
                        length, model_name))

            self.fail_on_count_mismatch[model_name] = True

            self.model_from[model_name] = self.get_right_model(
                model_name, start_version)
            self.model_to[model_name] = self.get_right_model(
                model_name, start_version + 1)

            if self.model_from[model_name] is not None and self.model_to[
                    model_name] is not None:
                self.entries_count[model_name] = self.store_old.find(
                    self.model_from[model_name]).count()
            else:
                self.entries_count[model_name] = 0

        if self.start_version + 1 == DATABASE_VERSION:
            # we are there!
            if not os.access(GLSettings.db_schema, os.R_OK):
                GLSettings.print_msg(
                    "Unable to access %s" % GLSettings.db_schema)
                raise IOError('Unable to access db schema file')
            with open(GLSettings.db_schema) as f:
                queries = ''.join(f).split(';')
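                # note: a naive split that assumes no ';' appears inside string literals in the schema file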
                for query in queries:
                    self.execute_query(query)

        else:  # manage the migration here
            for k, _ in self.migration_mapping.iteritems():
                query = self.get_right_sql_version(k, self.start_version + 1)
                if not query:
                    # the table has been removed
                    continue

                self.execute_query(query)

        self.store_new.commit()
Example #9
    def migrate_model(self, model_name):
        objs_count = self.store_old.find(self.model_from[model_name]).count()

        specific_migration_function = getattr(self, 'migrate_%s' % model_name, None)
        if specific_migration_function is not None:
            GLSettings.print_msg(' ł %s [#%d]' % (model_name, objs_count))
            specific_migration_function()
        else:
            GLSettings.print_msg(' * %s [#%d]' % (model_name, objs_count))
            self.generic_migration_function(model_name)
Example #11
    def setUp(self):
        GLSettings.set_devel_mode()
        GLSettings.logging = None
        GLSettings.scheduler_threadpool = FakeThreadPool()
        GLSettings.sessions = {}
        GLSettings.failed_login_attempts = 0

        if os.path.isdir('/dev/shm'):
            GLSettings.working_path = '/dev/shm/globaleaks'
            GLSettings.ramdisk_path = '/dev/shm/globaleaks/ramdisk'
        else:
            GLSettings.working_path = './working_path'
            GLSettings.ramdisk_path = './working_path/ramdisk'

        GLSettings.eval_paths()
        GLSettings.remove_directories()
        GLSettings.create_directories()

        self.setUp_dummy()

        yield db.create_tables(self.create_node)

        for fixture in getattr(self, 'fixtures', []):
            yield import_fixture(fixture)

        yield import_memory_variables()

        # override of imported memory variables
        GLSettings.memory_copy.allow_unencrypted = True

        anomaly.Alarm.reset()
        event.EventTrackQueue.reset()
        statistics_sched.StatisticsSchedule.reset()

        self.internationalized_text = load_appdata()['node']['whistleblowing_button']
Example #12
    def __init__(self, migration_mapping, start_version, store_old, store_new):
        self.appdata = load_appdata()

        self.migration_mapping = migration_mapping
        self.start_version = start_version

        self.store_old = store_old
        self.store_new = store_new

        self.model_from = {}
        self.model_to = {}
        self.entries_count = {}
        self.fail_on_count_mismatch = {}

        for model_name, model_history in migration_mapping.iteritems():
            length = DATABASE_VERSION + 1 - FIRST_DATABASE_VERSION_SUPPORTED
            if len(model_history) != length:
                raise TypeError('Expecting a table with {} statuses ({})'.format(length, model_name))

            self.fail_on_count_mismatch[model_name] = True

            self.model_from[model_name] = self.get_right_model(model_name, start_version)
            self.model_to[model_name] = self.get_right_model(model_name, start_version + 1)

            if self.model_from[model_name] is not None and self.model_to[model_name] is not None:
                self.entries_count[model_name] = self.store_old.find(self.model_from[model_name]).count()
            else:
                self.entries_count[model_name] = 0

        if self.start_version + 1 == DATABASE_VERSION:
            # we are there!
            if not os.access(GLSettings.db_schema, os.R_OK):
                GLSettings.print_msg("Unable to access %s ' % GLSettings.db_schema")
                raise IOError('Unable to access db schema file')
            with open(GLSettings.db_schema) as f:
                queries = ''.join(f).split(';')
                for query in queries:
                    self.execute_query(query)

        else: # manage the migration here
            for k, _ in self.migration_mapping.iteritems():
                query = self.get_right_sql_version(k, self.start_version + 1)
                if not query:
                    # the table has been removed
                    continue

                self.execute_query(query)

        self.store_new.commit()
Example #13
    def _deferred_start(self):
        ret = update_db()

        if ret == -1:
            reactor.stop()

        if ret == 0:
            init_db()

        sync_clean_untracked_files()
        sync_refresh_memory_variables()

        GLSettings.orm_tp.start()

        reactor.addSystemEventTrigger('after', 'shutdown',
                                      GLSettings.orm_tp.stop)

        arw = APIResourceWrapper()

        GLSettings.api_factory = Site(arw, logFormatter=timedLogFormatter)

        for sock in GLSettings.http_socks:
            listen_tcp_on_sock(reactor, sock.fileno(), GLSettings.api_factory)

        GLSettings.state.process_supervisor = ProcessSupervisor(
            GLSettings.https_socks, '127.0.0.1', GLSettings.bind_port)

        yield GLSettings.state.process_supervisor.maybe_launch_https_workers()

        GLSettings.start_jobs()

        print(
            "GlobaLeaks is now running and accessible at the following urls:")

        if GLSettings.memory_copy.reachable_via_web:
            print("- http://%s:%d%s" %
                  (GLSettings.bind_address, GLSettings.bind_port,
                   GLSettings.api_prefix))
            if GLSettings.memory_copy.hostname:
                print("- http://%s:%d%s" %
                      (GLSettings.memory_copy.hostname, GLSettings.bind_port,
                       GLSettings.api_prefix))
        else:
            print("- http://127.0.0.1:%d%s" %
                  (GLSettings.bind_port, GLSettings.api_prefix))

        if GLSettings.onionservice is not None:
            print("- http://%s%s" %
                  (GLSettings.onionservice, GLSettings.api_prefix))
Example #14
def perform_data_update(dbfile):
    store = Store(create_database(GLSettings.make_db_uri(dbfile)))

    enabled_languages = [
        lang.name for lang in store.find(l10n.EnabledLanguage)
    ]

    removed_languages = list(
        set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES))

    if len(removed_languages):
        removed_languages.sort()
        removed_languages = ', '.join(removed_languages)
        raise Exception(
            "FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
            "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop"
            % removed_languages)

    try:
        db_perform_data_update(store)
        store.commit()
    except:
        store.rollback()
        raise
    finally:
        store.close()
Example #15
    def test_mig_37_valid_tor_hs_key(self):
        self._initStartDB(36)

        from globaleaks.db.migrations import update_37
        t = update_37.TOR_DIR
        update_37.TOR_DIR = GLSettings.db_path

        pk_path = os.path.join(update_37.TOR_DIR, 'private_key')
        hn_path = os.path.join(update_37.TOR_DIR, 'hostname')

        shutil.copy(os.path.join(helpers.DATA_DIR, 'tor/private_key'), pk_path)
        shutil.copy(os.path.join(helpers.DATA_DIR, 'tor/hostname'), hn_path)

        ret = update_db()
        self.assertEqual(ret, None)

        new_uri = GLSettings.make_db_uri(
            os.path.join(GLSettings.db_path, GLSettings.db_file_name))
        store = Store(create_database(new_uri))
        hs = config.NodeFactory(store).get_val('onionservice')
        pk = config.PrivateFactory(store).get_val('tor_onion_key')

        self.assertEqual('lftx7dbyvlc5txtl.onion', hs)
        with open(os.path.join(helpers.DATA_DIR,
                               'tor/ephemeral_service_key')) as f:
            saved_key = f.read().strip()
        self.assertEqual(saved_key, pk)

        store.close()

        shutil.rmtree(GLSettings.db_path)
        update_37.TOR_DIR = t
Example #16
    def setUp(self):
        helpers.init_glsettings_for_unit_tests()

        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        shutil.rmtree(GLSettings.db_path, True)
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % DATABASE_VERSION
        db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                               'db', 'populated', db_name)
        shutil.copyfile(db_path, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val(u'version', self.dummy_ver)
        self.assertEqual(prv.get_val(u'version'), self.dummy_ver)
        store.commit()
        store.close()

        # backup various mocks that we will use
        self._bck_f = config.is_cfg_valid
        GLConfig['private']['xx_smtp_password'] = GLConfig['private'].pop(
            'smtp_password')
        self.dp = u'yes_you_really_should_change_me'
Example #17
    def setUp(self):
        helpers.init_glsettings_for_unit_tests()

        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % DATABASE_VERSION
        db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'db', 'populated', db_name)
        shutil.copyfile(db_path, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val('version', self.dummy_ver)
        self.assertEqual(prv.get_val('version'), self.dummy_ver)
        store.commit()
        store.close()

        # backup various mocks that we will use
        self._bck_f = config.is_cfg_valid
        GLConfig['private']['xx_smtp_password'] = GLConfig['private'].pop('smtp_password')
        self.dp = u'yes_you_really_should_change_me'
Example #18
def init_glsettings_for_unit_tests():
    GLSettings.testing = True
    GLSettings.set_devel_mode()
    GLSettings.logging = None
    GLSettings.failed_login_attempts = 0
    GLSettings.working_path = './working_path'

    GLSettings.eval_paths()

    GLSettings.set_ramdisk_path()

    GLSettings.remove_directories()
    GLSettings.create_directories()

    GLSettings.orm_tp = FakeThreadPool()

    GLSessions.clear()
Example #19
    def operation(self):
        net_agent = GLSettings.get_agent()
        try:
            log.debug('Fetching list of Tor exit nodes')
            yield GLSettings.appstate.tor_exit_set.update(net_agent)
        except ConnectionRefusedError as e:
            log.err('Exit relay fetch failed: %s' % e)

        log.debug('Retrieved a list of %d exit nodes' % len(GLSettings.appstate.tor_exit_set))
Example #20
    def operation(self):
        # ------- BEGIN Anomalies section -------
        anomalies_to_save = get_anomalies()
        save_anomalies(anomalies_to_save)
        # ------- END Anomalies section ---------

        # ------- BEGIN Stats section -----------
        current_time = datetime_now()
        statistic_summary = get_statistics()
        save_statistics(GLSettings.stats_collection_start_time, current_time,
                        statistic_summary)
        # ------- END Stats section -------------

        # Hourly Resets
        GLSettings.reset_hourly()

        log.debug("Saved stats and time updated, keys saved %d" %
                  len(statistic_summary.keys()))
Example #21
    def process_mail_creation(self, store, data):
        receiver_id = data['receiver']['id']

        # Do not spool emails if the receiver has opted out of notifications for this tip.
        if not data['tip']['enable_notifications']:
            log.debug("Discarding emails for %s due to receiver's preference.", receiver_id)
            return

        # https://github.com/globaleaks/GlobaLeaks/issues/798
        # TODO: the current solution is global and configurable only by the admin
        sent_emails = GLSettings.get_mail_counter(receiver_id)
        if sent_emails >= GLSettings.memory_copy.notif.notification_threshold_per_hour:
            log.debug("Discarding emails for receiver %s due to threshold already exceeded for the current hour",
                      receiver_id)
            return

        GLSettings.increment_mail_counter(receiver_id)
        if sent_emails >= GLSettings.memory_copy.notif.notification_threshold_per_hour:
            log.info("Reached threshold of %d emails with limit of %d for receiver %s",
                     sent_emails,
                     GLSettings.memory_copy.notif.notification_threshold_per_hour,
                     receiver_id)

            # simply changing the notification type causes the
            # notification_limit_reached notification to be sent instead
            data['type'] = u'receiver_notification_limit_reached'

        data['notification'] = self.serialize_config(store, 'notification', data['receiver']['language'])
        data['node'] = self.serialize_config(store, 'node', data['receiver']['language'])

        if not data['node']['allow_unencrypted'] and len(data['receiver']['pgp_key_public']) == 0:
            return

        subject, body = Templating().get_mail_subject_and_body(data)

        # If the receiver has encryption enabled encrypt the mail body
        if data['receiver']['pgp_key_public']:
            body = encrypt_message(data['receiver']['pgp_key_public'], body)

        store.add(models.Mail({
            'address': data['receiver']['mail_address'],
            'subject': subject,
            'body': body
        }))
Example #22
def perform_data_update(dbfile):
    new_tmp_store = Store(create_database(GLSettings.make_db_uri(dbfile)))
    try:
        db_perform_data_update(new_tmp_store)
        new_tmp_store.commit()
    except:
        new_tmp_store.rollback()
        raise
    finally:
        new_tmp_store.close()
Example #23
def init_glsettings_for_unit_tests():
    GLSettings.testing = True
    GLSettings.set_devel_mode()
    GLSettings.logging = None
    GLSettings.failed_login_attempts = 0
    GLSettings.working_path = './working_path'
    GLSettings.onionservice = 'kpvz7ki2v5agwt35.onion'

    GLSettings.eval_paths()

    GLSettings.set_ramdisk_path()

    GLSettings.remove_directories()
    GLSettings.create_directories()

    GLSettings.orm_tp = FakeThreadPool()

    GLSettings.memory_copy.hostname = 'localhost'

    GLSessions.clear()
Example #24
def init_glsettings_for_unit_tests():
    GLSettings.testing = True
    GLSettings.set_devel_mode()
    GLSettings.logging = None
    GLSettings.scheduler_threadpool = FakeThreadPool()
    GLSettings.sessions.clear()
    GLSettings.failed_login_attempts = 0
    GLSettings.working_path = './working_path'
    GLSettings.ramdisk_path = os.path.join(GLSettings.working_path, 'ramdisk')

    GLSettings.eval_paths()
    GLSettings.remove_directories()
    GLSettings.create_directories()
Example #26
def pre_listen_startup():
    mask = 0
    if GLSettings.devel_mode:
        mask = 9000

    GLSettings.http_socks = []
    for port in GLSettings.bind_ports:
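        # in devel mode, shift privileged ports (< 1024) into an unprivileged range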
        port = port + mask if port < 1024 else port
        http_sock, fail = reserve_port_for_ip(GLSettings.bind_address, port)
        if fail is not None:
            log.err("Could not reserve socket for %s (error: %s)" %
                    (fail[0], fail[1]))
        else:
            GLSettings.http_socks += [http_sock]

    https_sock, fail = reserve_port_for_ip(GLSettings.bind_address, 443 + mask)
    if fail is not None:
        log.err("Could not reserve socket for %s (error: %s)" %
                (fail[0], fail[1]))
    else:
        GLSettings.https_socks = [https_sock]

    GLSettings.fix_file_permissions()
    GLSettings.drop_privileges()
    GLSettings.check_directories()
Example #27
def pre_listen_startup():
    mask = 0
    if GLSettings.devel_mode:
        mask = 8000

    GLSettings.http_socks = []

    # Allocate local ports
    for port in GLSettings.bind_local_ports:
        http_sock, fail = reserve_port_for_ip('127.0.0.1', port)
        if fail is not None:
            log.err("Could not reserve socket for %s (error: %s)" % (fail[0], fail[1]))
        else:
            GLSettings.http_socks += [http_sock]

    # Allocate remote ports
    for port in GLSettings.bind_remote_ports:
        sock, fail = reserve_port_for_ip(GLSettings.bind_address, port+mask)
        if fail is not None:
            log.err("Could not reserve socket for %s (error: %s)" % (fail[0], fail[1]))
            continue

        if port == 80:
            GLSettings.http_socks += [sock]
        elif port == 443:
            GLSettings.https_socks += [sock]

    GLSettings.fix_file_permissions()
    GLSettings.drop_privileges()
    GLSettings.check_directories()
Example #28
    def postconditions_36(self):
        new_uri = GLSettings.make_db_uri(
            os.path.join(GLSettings.db_path, GLSettings.db_file_name))
        store = Store(create_database(new_uri))
        hs = config.NodeFactory(store).get_val(u'onionservice')
        pk = config.PrivateFactory(store).get_val(u'tor_onion_key')

        self.assertEqual('lftx7dbyvlc5txtl.onion', hs)
        with open(os.path.join(helpers.DATA_DIR,
                               'tor/ephemeral_service_key')) as f:
            saved_key = f.read().strip()
        self.assertEqual(saved_key, pk)
        store.close()
Example #29
    def _wrap(self, function, *args, **kwargs):
        """
        Wrap provided function calling it inside a thread and
        passing the store to it.
        """
        self.store = self.get_store()

        try:
            if self.instance:
                result = function(self.instance, self.store, *args, **kwargs)
            else:
                result = function(self.store, *args, **kwargs)

        except exceptions.DisconnectionError as e:
            transaction.abort()
            # we print the exception here because we do not propagate it
            GLSettings.log_debug(e)
            result = None
        except exceptions.IntegrityError as e:
            transaction.abort()
            raise DatabaseIntegrityError(str(e))
        except HTTPError as excep:
            transaction.abort()
            raise excep
        except:
            transaction.abort()
            self.store.close()
            # propagate the exception
            raise
        else:
            if not self.readonly:
                self.store.commit()
            else:
                self.store.flush()
                self.store.invalidate()
        finally:
            self.store.close()

        return result
Example #30
    def _test(self, path, f):
        helpers.init_glsettings_for_unit_tests()
        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        final_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))
        GLSettings.db_uri = GLSettings.make_db_uri(final_db_file)

        os.mkdir(GLSettings.db_path)
        dbpath = os.path.join(path, f)
        dbfile = os.path.join(GLSettings.db_path, f)
        shutil.copyfile(dbpath, dbfile)
        ret = perform_system_update()
        shutil.rmtree(GLSettings.db_path)
        self.assertNotEqual(ret, -1)
Example #32
    def _initStartDB(self, target_ver):
        helpers.init_glsettings_for_unit_tests()

        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % target_ver
        db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'db', 'populated', db_name)
        shutil.copyfile(db_path, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        self.store = Store(create_database(GLSettings.db_uri))
Example #33
    def setUp(self):
        test_config.skipCase(self)
        self.test_reactor = task.Clock()

        jobs.base.test_reactor = self.test_reactor
        tempdict.test_reactor = self.test_reactor
        token.TokenList.reactor = self.test_reactor
        GLSessions.reactor = self.test_reactor

        init_glsettings_for_unit_tests()

        self.setUp_dummy()

        if self.initialize_test_database_using_archived_db:
            shutil.copy(
                os.path.join(TEST_DIR, 'db', 'empty', GLSettings.db_file_name),
                os.path.join(GLSettings.working_path, 'db',
                             GLSettings.db_file_name))
        else:
            yield db.init_db(use_single_lang=True)

        allow_unencrypted = self.encryption_scenario in ['PLAINTEXT', 'MIXED']

        yield update_node_setting('allow_unencrypted', allow_unencrypted)

        yield db.refresh_memory_variables()

        sup = ProcessSupervisor([], '127.0.0.1', 8082)
        GLSettings.state.process_supervisor = sup

        Alarm.reset()
        event.EventTrackQueue.clear()
        GLSettings.reset_hourly()

        GLSettings.submission_minimum_delay = 0

        self.internationalized_text = load_appdata()['node']['whistleblowing_button']
Example #34
    def _initStartDB(self, target_ver):
        helpers.init_glsettings_for_unit_tests()

        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % target_ver
        db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                               'db', 'populated', db_name)
        shutil.copyfile(db_path, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        self.store = Store(create_database(GLSettings.db_uri))
Example #35
    def operation(self):
        # ------- BEGIN Anomalies section -------
        anomalies_to_save = get_anomalies()
        if anomalies_to_save:
            save_anomalies(anomalies_to_save)
            log.debug("Stored %d anomalies collected during the last hour",
                      len(anomalies_to_save))

        # ------- END Anomalies section ---------

        # ------- BEGIN Stats section -----------
        current_time = datetime_now()
        statistic_summary = get_statistics()
        if statistic_summary:
            save_statistics(GLSettings.stats_collection_start_time,
                            current_time, statistic_summary)
            log.debug("Stored statistics %s collected from %s to %s",
                      statistic_summary,
                      GLSettings.stats_collection_start_time, current_time)
        # ------- END Stats section -------------

        # Hourly Resets
        GLSettings.reset_hourly()
Example #36
    def _test(self, path, f):
        helpers.init_glsettings_for_unit_tests()
        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        final_db_file = os.path.abspath(
            os.path.join(GLSettings.db_path,
                         'glbackend-%d.db' % DATABASE_VERSION))
        GLSettings.db_uri = GLSettings.make_db_uri(final_db_file)

        os.mkdir(GLSettings.db_path)
        dbpath = os.path.join(path, f)
        dbfile = os.path.join(GLSettings.db_path, f)
        shutil.copyfile(dbpath, dbfile)
        ret = perform_system_update()
        shutil.rmtree(GLSettings.db_path)
        self.assertNotEqual(ret, -1)
Example #37
    def post(self):
        if GLSettings.memory_copy.hostname == '':
            raise errors.ValidationError('hostname is not set')

        net_agent = GLSettings.get_agent()

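        # probe http://<hostname>/robots.txt to verify the configured hostname reaches this node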
        t = ('http', GLSettings.memory_copy.hostname, 'robots.txt', None, None)
        url = bytes(urlparse.urlunsplit(t))
        try:
            resp = yield net_agent.request('GET', url)
            body = yield readBody(resp)

            server_h = resp.headers.getRawHeaders('Server', [None])[-1].lower()
            if not body.startswith('User-agent: *') or server_h != 'globaleaks':
                raise EnvironmentError('Response unexpected')
        except (EnvironmentError, ConnectError) as e:
            log.err(e)
            raise errors.ExternalResourceError()
Example #38
    def start_globaleaks(self):
        try:
            GLSettings.fix_file_permissions()
            GLSettings.drop_privileges()
            GLSettings.check_directories()

            # Check presence of an existing database and eventually perform its migration
            check = check_db_files()
            if check == -1:
                self._reactor.stop()
            elif check == 0:
                yield init_db()
            else:
                yield update_version()
                yield init_appdata()

            yield clean_untracked_files()

            yield refresh_memory_variables()

            if GLSettings.cmdline_options:
                yield apply_cmdline_options()

            self.start_asynchronous_jobs()

            log.msg("GLBackend is now running")
            for ip in GLSettings.bind_addresses:
                log.msg("Visit http://%s:%d to interact with me" %
                        (ip, GLSettings.bind_port))

            for host in GLSettings.accepted_hosts:
                if host not in GLSettings.bind_addresses:
                    log.msg("Visit http://%s:%d to interact with me" %
                            (host, GLSettings.bind_port))

            for other in GLSettings.configured_hosts:
                if other:
                    log.msg("Visit %s to interact with me" % other)

            log.msg(
                "Reminder: GlobaLeaks is not accessible from other URLs, this is strictly enforced"
            )
            log.msg(
                "Check documentation in https://github.com/globaleaks/GlobaLeaks/wiki/ for special enhancement"
            )

        except Exception as excep:
            log.err(
                "ERROR: Cannot start GlobaLeaks; please manually check the error."
            )
            log.err("EXCEPTION: %s" % excep)
            self._reactor.stop()
Example #39
def perform_data_update(dbfile):
    store = Store(create_database(GLSettings.make_db_uri(dbfile)))

    enabled_languages = [lang.name for lang in store.find(l10n.EnabledLanguage)]

    removed_languages = list(set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES))

    if len(removed_languages):
        removed_languages.sort()
        removed_languages = ', '.join(removed_languages)
        raise Exception("FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
                        "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop" % removed_languages)


    try:
        db_perform_data_update(store)
        store.commit()
    except:
        store.rollback()
        raise
    finally:
        store.close()
Example #40
    def start_globaleaks(self):
        try:
            GLSettings.fix_file_permissions()
            GLSettings.drop_privileges()
            GLSettings.check_directories()

            # Check presence of an existing database and eventually perform its migration
            check = check_db_files()
            if check == -1:
                self._reactor.stop()
            elif check == 0:
                yield init_db()
            else:
                yield update_version()
                yield init_appdata()

            yield clean_untracked_files()

            yield refresh_memory_variables()

            if GLSettings.cmdline_options:
                yield apply_cmdline_options()

            self.start_asynchronous_jobs()

            log.msg("GLBackend is now running")
            for ip in GLSettings.bind_addresses:
                log.msg("Visit http://%s:%d to interact with me" % (ip, GLSettings.bind_port))

            for host in GLSettings.accepted_hosts:
                if host not in GLSettings.bind_addresses:
                    log.msg("Visit http://%s:%d to interact with me" % (host, GLSettings.bind_port))

            for other in GLSettings.configured_hosts:
                if other:
                    log.msg("Visit %s to interact with me" % other)

            log.msg("Remind: GlobaLeaks is not accessible from other URLs, this is strictly enforced")
            log.msg("Check documentation in https://github.com/globaleaks/GlobaLeaks/wiki/ for special enhancement")

        except Exception as excep:
            log.err("ERROR: Cannot start GlobaLeaks; please manual check the error.")
            log.err("EXCEPTION: %s" % excep)
            self._reactor.stop()
Example #41
def globaleaks_start():
    GLSettings.fix_file_permissions()
    GLSettings.drop_privileges()
    GLSettings.check_directories()

    if not GLSettings.accepted_hosts:
        log.err("Missing a list of hosts usable to contact GLBackend, abort")
        return False

    d = create_tables()

    d.addCallback(clean_untracked_files)

    @d.addCallback
    @defer.inlineCallbacks
    def cb(res):
        start_asynchronous()
        yield import_memory_variables()
        tor_configured_hosts = yield apply_cli_options()

        log.msg("GLBackend is now running")
        for ip in GLSettings.bind_addresses:
            log.msg("Visit http://%s:%d to interact with me" %
                    (ip, GLSettings.bind_port))

        for host in GLSettings.accepted_hosts:
            if host not in GLSettings.bind_addresses:
                log.msg("Visit http://%s:%d to interact with me" %
                        (host, GLSettings.bind_port))

        if tor_configured_hosts:
            for other in tor_configured_hosts:
                if other:
                    log.msg("Visit %s to interact with me" % other)

        log.msg(
            "Reminder: GlobaLeaks is not accessible from other URLs, this is strictly enforced"
        )
        log.msg(
            "Check documentation in https://github.com/globaleaks/GlobaLeaks/wiki/ for special enhancement"
        )

    return True
Example #42
    def start_globaleaks(self):
        try:
            GLSettings.fix_file_permissions()
            GLSettings.drop_privileges()
            GLSettings.check_directories()

            if GLSettings.initialize_db:
                yield init_db()
            else:
                yield update_version()
                yield init_appdata()

            yield clean_untracked_files()

            yield refresh_memory_variables()

            self.start_asynchronous_jobs()

        except Exception as excep:
            log.err("ERROR: Cannot start GlobaLeaks; please manual check the error.")
            log.err("EXCEPTION: %s" % excep)
            self._reactor.stop()
Example #43
    def start_globaleaks(self):
        try:
            GLSettings.fix_file_permissions()
            GLSettings.drop_privileges()
            GLSettings.check_directories()

            GLSettings.orm_tp.start()
            self._reactor.addSystemEventTrigger('after', 'shutdown', GLSettings.orm_tp.stop)

            if GLSettings.initialize_db:
                yield init_db()

            yield clean_untracked_files()

            yield refresh_memory_variables()

            self.start_asynchronous_jobs()

        except Exception as excep:
            log.err("ERROR: Cannot start GlobaLeaks; please manually check the error.")
            log.err("EXCEPTION: %s" % excep)
            self._reactor.stop()
Example #44
def globaleaks_start():
    GLSettings.fix_file_permissions()
    GLSettings.drop_privileges()
    GLSettings.check_directories()

    if not GLSettings.accepted_hosts:
        log.err("Missing a list of hosts usable to contact GLBackend, abort")
        return False

    d = create_tables()

    d.addCallback(clean_untracked_files)

    @d.addCallback
    @defer.inlineCallbacks
    def cb(res):
        start_asynchronous()
        yield import_memory_variables()
        tor_configured_hosts = yield apply_cli_options()

        log.msg("GLBackend is now running")
        for ip in GLSettings.bind_addresses:
            log.msg("Visit http://%s:%d to interact with me" % (ip, GLSettings.bind_port))

        for host in GLSettings.accepted_hosts:
            if host not in GLSettings.bind_addresses:
                log.msg("Visit http://%s:%d to interact with me" % (host, GLSettings.bind_port))

        if tor_configured_hosts:
            for other in tor_configured_hosts:
                if other:
                    log.msg("Visit %s to interact with me" % other)

        log.msg("Remind: GlobaLeaks is not accessible from other URLs, this is strictly enforced")
        log.msg("Check documentation in https://github.com/globaleaks/GlobaLeaks/wiki/ for special enhancement")

    return True
Example #45
 def execute_query(self, query):
     try:
         self.store_new.execute(query + ';')
     except OperationalError as excep:
         GLSettings.print_msg('OperationalError %s while executing query: %s' % (excep, query))
         raise excep
Example #46
def filter_notification_event(notifque):
    """
    :param notifque: the current notification event queue
    :return: a modified queue, in case some emails should not be sent
    Basically performs two filterings; they are defined in:
     1) issue #444
     2) issue #798
    """

    # Here we collect the Storm events of Files, keyed by their Tip
    files_event_by_tip = {}

    _tmp_list = []
    return_filtered_list = []
    # Storm.id values to be skipped (marked as sent without mailing)
    orm_id_to_be_skipped = []

    for ne in notifque:
        if ne['trigger'] != u'Tip':
            continue
        files_event_by_tip.update({ne['tip_info']['id'] : []})

    log.debug("Filtering function: iterating over %d Tip" % len(files_event_by_tip.keys()))
    # now files_event_by_tip contains N keys, each mapped to an empty list;
    # we loop twice because dict ordering is arbitrary
    for ne in notifque:

        if GLSettings.memory_copy.disable_receiver_notification_emails:
            orm_id_to_be_skipped.append(ne['orm_id'])
            continue

        if ne['trigger'] != u'File':
            _tmp_list.append(ne)
            continue

        if ne['tip_info']['id'] in files_event_by_tip:
            orm_id_to_be_skipped.append(ne['orm_id'])
        else:
            _tmp_list.append(ne)

    if len(orm_id_to_be_skipped):
        if GLSettings.memory_copy.disable_receiver_notification_emails:
            log.debug("All the %d mails will be marked as sent because the admin has disabled receivers notifications" %
                      len(orm_id_to_be_skipped))
        else:
            log.debug("Filtering function: Marked %d Files notification to be suppressed cause part of a submission" %
                      len(orm_id_to_be_skipped))

    for ne in _tmp_list:
        receiver_id = ne['receiver_info']['id']

        sent_emails = GLSettings.get_mail_counter(receiver_id)

        if sent_emails >= GLSettings.memory_copy.notification_threshold_per_hour:
            log.debug("Discarding email for receiver %s due to threshold already exceeded for the current hour" %
                      receiver_id)
            orm_id_to_be_skipped.append(ne['orm_id'])
            continue

        GLSettings.increment_mail_counter(receiver_id)

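        # the +1 accounts for the email just recorded by increment_mail_counter()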
        if sent_emails + 1 >= GLSettings.memory_copy.notification_threshold_per_hour:
            log.info("Reached threshold of %d emails with limit of %d for receiver %s" % (
                sent_emails,
                GLSettings.memory_copy.notification_threshold_per_hour,
                receiver_id)
            )

            # Append
            anomalyevent = OD()
            anomalyevent.type = u'receiver_notification_limit_reached'
            anomalyevent.notification_settings = ne.notification_settings
            anomalyevent.node_info = ne.node_info
            anomalyevent.context_info = None
            anomalyevent.receiver_info = ne.receiver_info
            anomalyevent.tip_info = None
            anomalyevent.subevent_info = None
            anomalyevent.orm_id = '0'

            return_filtered_list.append(anomalyevent)

            orm_id_to_be_skipped.append(ne['orm_id'])
            continue

        return_filtered_list.append(ne)

    log.debug("Mails filtering completed passing from #%d to #%d events" %
              (len(notifque), len(return_filtered_list)))

    # return the new list of events and the list of Storm.id values
    return return_filtered_list, orm_id_to_be_skipped
Example #47
def perform_version_update(version):
    """
    @param version:
    @return:
    """
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        GLSettings.print_msg("Migrations from DB version lower than %d are no more supported!" % FIRST_DATABASE_VERSION_SUPPORTED)
        GLSettings.print_msg("If you can't create your Node from scratch, contact us asking for support.")
        quit()

    tmpdir = os.path.abspath(os.path.join(GLSettings.db_path, 'tmp'))
    orig_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % version))
    final_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))

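    # work on a copy inside tmpdir so the original database survives a failed migration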
    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy2(orig_db_file, tmpdir)

    try:
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % version))
            new_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % (version + 1)))

            GLSettings.db_file = new_db_file
            GLSettings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            GLSettings.print_msg("Updating DB from version %d to version %d" % (version, version + 1))

            store_old = Store(create_database('sqlite:' + old_db_file))
            store_new = Store(create_database('sqlite:' + new_db_file))

            # Here the migration script is instantiated
            MigrationModule = importlib.import_module("globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(migration_mapping, version, store_old, store_new)

            GLSettings.print_msg("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    GLSettings.print_msg("Failure while executing migration prologue: %s" % exception)
                    raise exception

                for model_name, _ in migration_mapping.iteritems():
                    if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            GLSettings.print_msg("Failure while migrating table %s: %s " % (model_name, exception))
                            raise exception
                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    GLSettings.print_msg("Failure while executing migration epilogue: %s " % exception)
                    raise exception

            finally:
                # the database should always be closed before leaving the application
                # in order to avoid leaking journal files.
                migration_script.close()

            GLSettings.print_msg("Migration stats:")

            # we open a new db in order to verify integrity of the generated file
            store_verify = Store(create_database('sqlite:' + new_db_file))

            for model_name, _ in migration_mapping.iteritems():
                if model_name == 'ApplicationData':
                    continue

                if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                    count = store_verify.find(migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" % \
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            GLSettings.print_msg(" * %s table migrated (entries count changed from %d to %d)" % \
                                                 (model_name, migration_script.entries_count[model_name], count))
                    else:
                        GLSettings.print_msg(" * %s table migrated (%d entry(s))" % \
                                             (model_name, migration_script.entries_count[model_name]))

            version += 1

            store_verify.close()

    except Exception as exception:
        # simply propagate the exception
        raise exception

    else:
        # in case of success first copy the new migrated db, then as last action delete the original db file
        shutil.copy(os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % DATABASE_VERSION)), final_db_file)
        os.remove(orig_db_file)

    finally:
        # always cleanup the temporary directory used for the migration
        shutil.rmtree(tmpdir, True)