Example 1
    def _wrap(self, function, *args, **kwargs):
        """
        Wrap the provided function, calling it inside a thread and
        passing the store to it.
        """
        with transact_lock:
            start_time = datetime.now()
            store = Store(create_database(GLSettings.db_uri))

            try:
                if self.instance:
                    result = function(self.instance, store, *args, **kwargs)
                else:
                    result = function(store, *args, **kwargs)

                store.commit()
            except:
                store.rollback()
                raise
            else:
                return result
            finally:
                store.reset()
                store.close()

                duration = timedelta_to_milliseconds(datetime.now() -
                                                     start_time)
                msg = "Query [%s] executed in %.1fms" % (self.method.__name__,
                                                         duration)
                if duration > self.timelimit:
                    log.err(msg)
                    schedule_exception_email(msg)
                else:
                    log.debug(msg)
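The method above is GlobaLeaks' transact-style pattern: each call gets its own Store, and the commit/rollback/reset/close lifecycle plus slow-query logging are handled in one place. Below is a minimal, hedged sketch of the same lifecycle using only the public Storm API; the helper name and the SQLite URI are illustrative assumptions, not part of the original code.

# Minimal sketch of the commit/rollback/reset/close lifecycle shown above.
# run_in_transaction and DB_URI are illustrative assumptions.
from storm.locals import create_database, Store

DB_URI = 'sqlite:/tmp/example.db'

def run_in_transaction(function, *args, **kwargs):
    store = Store(create_database(DB_URI))
    try:
        result = function(store, *args, **kwargs)
        store.commit()      # persist only if the function succeeded
    except:
        store.rollback()    # undo partial work, then re-raise
        raise
    else:
        return result
    finally:
        store.reset()       # drop objects cached by the store
        store.close()       # always release the connection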
Example 2
def perform_data_update(dbfile):
    store = Store(create_database(GLSettings.make_db_uri(dbfile)))

    enabled_languages = [
        lang.name for lang in store.find(l10n.EnabledLanguage)
    ]

    removed_languages = list(
        set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES))

    if len(removed_languages):
        removed_languages.sort()
        removed_languages = ', '.join(removed_languages)
        raise Exception(
            "FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
            "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop"
            % removed_languages)

    try:
        db_perform_data_update(store)
        store.commit()
    except:
        store.rollback()
        raise
    finally:
        store.close()
Example 3
def createUserDB(connString):
    '''Create the user database and a default admin account
    with the password 'admin'.'''
    from tsload.user.localauth import LocalAuth
    
    database = create_database(connString)
    store = Store(database)
    
    TableSchema(database, User).create(store)
    TableSchema(database, Role).create(store)
    
    localAuth = LocalAuth()
    
    admin = User()
    
    admin.name = 'admin'
    admin.gecosName = u'TSLoad Administrator'
    admin.authService = 'local'
    
    localAuth.changePassword(admin, 'admin')
    
    store.add(admin)
    
    adminRole = Role()
    adminRole.user = admin
    adminRole.role = 'admin'
    
    store.add(adminRole)
    
    store.commit()
    
    store.close()
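TableSchema in this example is a helper from the tsload project, not part of Storm itself: Storm maps classes onto existing tables but does not generate DDL. The following is a hedged sketch of the same setup using only the public Storm API; the class, table name, columns and CREATE TABLE statement are illustrative assumptions.

# Illustrative sketch: issue the schema as plain SQL, then persist a mapped
# object through the store. All names here are assumptions for this sketch.
from storm.locals import create_database, Store, Int, Unicode

class AppUser(object):
    __storm_table__ = 'users'
    id = Int(primary=True)
    name = Unicode()

store = Store(create_database('sqlite:/tmp/users.db'))
store.execute("CREATE TABLE IF NOT EXISTS users "
              "(id INTEGER PRIMARY KEY, name VARCHAR)")

user = AppUser()
user.name = u'admin'
store.add(user)
store.commit()   # the id is assigned by SQLite when the row is flushed
store.close()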
Example 4
def createExpsvcDB(connString):
    database = create_database(connString)
    store = Store(database)
    
    TableSchema(database, Agent).create(store)
    TableSchema(database, AgentResource).create(store)
    TableSchema(database, AgentResourceChild).create(store)
    TableSchema(database, WorkloadType).create(store)
    TableSchema(database, WorkloadParam).create(store)
    TableSchema(database, ExperimentProfile).create(store)
    TableSchema(database, ExperimentThreadPool).create(store)
    TableSchema(database, ExperimentWorkload).create(store)
    TableSchema(database, ExperimentWorkloadResource).create(store)
    
    store.commit()
    store.close()
Example 5
    def test_json_property(self):
        """The JSON property is encoded as JSON"""
        class TestModel(object):
            __storm_table__ = "json_test"

            id = Int(primary=True)
            json = JSON()

        connection = self.database.connect()
        value = {"a": 3, "b": "foo", "c": None}
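        # Python 2: json.dumps() returns a byte str here, so it is decoded to
        # unicode before being inserted into the TEXT column.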
        db_value = json.dumps(value).decode("utf-8")
        connection.execute("INSERT INTO json_test (json) VALUES (?)",
                           (db_value, ))
        connection.commit()

        store = Store(self.database)
        obj = store.find(TestModel).one()
        store.close()
        # The JSON object is decoded to python
        self.assertEqual(value, obj.json)
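The test above only exercises the read path (a raw INSERT followed by store.find()). The sketch below shows the corresponding write path through the same property; it is a hedged illustration that assumes the same fixture (self.database with an existing json_test table using an autoincrementing id) and redefines the model locally.

    def test_json_property_write(self):
        """Hedged sketch: values assigned to the JSON property are stored as JSON"""
        class TestModel(object):
            __storm_table__ = "json_test"

            id = Int(primary=True)
            json = JSON()

        value = {"a": 3, "b": "foo", "c": None}
        store = Store(self.database)
        obj = TestModel()
        obj.json = value            # serialized to JSON when the row is flushed
        store.add(obj)
        store.commit()

        # Read the raw column back to check the stored representation.
        raw = store.execute("SELECT json FROM json_test").get_one()[0]
        self.assertEqual(value, json.loads(raw))
        store.close()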
Example 6
def perform_data_update(dbfile):
    store = Store(create_database(GLSettings.make_db_uri(dbfile)))

    enabled_languages = [lang.name for lang in store.find(l10n.EnabledLanguage)]

    removed_languages = list(set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES))

    if len(removed_languages):
        removed_languages.sort()
        removed_languages = ', '.join(removed_languages)
        raise Exception("FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
                        "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop" % removed_languages)


    try:
        db_perform_data_update(store)
        store.commit()
    except:
        store.rollback()
        raise
    finally:
        store.close()
Example 7
    def _wrap(self, function, *args, **kwargs):
        """
        Wrap the provided function, calling it inside a thread and
        passing the store to it.
        """
        with transact_lock:
            store = Store(create_database(GLSettings.db_uri))

            try:
                if self.instance:
                    result = function(self.instance, store, *args, **kwargs)
                else:
                    result = function(store, *args, **kwargs)

                store.commit()
            except:
                store.rollback()
                raise
            else:
                return result
            finally:
                store.reset()
                store.close()
Example 8
def perform_schema_migration(version):
    """
    @param version: the schema version of the existing database to migrate from
    @return: None
    """
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        GLSettings.print_msg(
            "Migrations from DB version lower than %d are no longer supported!"
            % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    tmpdir = os.path.abspath(os.path.join(GLSettings.db_path, 'tmp'))
    orig_db_file = os.path.abspath(
        os.path.join(GLSettings.db_path, 'glbackend-%d.db' % version))
    final_db_file = os.path.abspath(
        os.path.join(GLSettings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))

    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy2(orig_db_file, tmpdir)

    new_db_file = None

    try:
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(
                os.path.join(tmpdir, 'glbackend-%d.db' % version))
            new_db_file = os.path.abspath(
                os.path.join(tmpdir, 'glbackend-%d.db' % (version + 1)))

            GLSettings.db_file = new_db_file
            GLSettings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            GLSettings.print_msg("Updating DB from version %d to version %d" %
                                 (version, version + 1))

            store_old = Store(create_database('sqlite:' + old_db_file))
            store_new = Store(create_database('sqlite:' + new_db_file))

            # Instantiate the migration script for this version step
            MigrationModule = importlib.import_module(
                "globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(
                migration_mapping, version, store_old, store_new)

            GLSettings.print_msg("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    GLSettings.print_msg(
                        "Failure while executing migration prologue: %s" %
                        exception)
                    raise exception

                for model_name, _ in migration_mapping.iteritems():
                    if migration_script.model_from[
                            model_name] is not None and migration_script.model_to[
                                model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            GLSettings.print_msg(
                                "Failure while migrating table %s: %s " %
                                (model_name, exception))
                            raise exception
                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    GLSettings.print_msg(
                        "Failure while executing migration epilogue: %s " %
                        exception)
                    raise exception

            finally:
                # The stores must always be closed before moving on,
                # otherwise SQLite journal files are left behind.
                migration_script.close()

            GLSettings.print_msg("Migration stats:")

            # Open the new database with a fresh store to verify the integrity of the generated file
            store_verify = Store(
                create_database(GLSettings.make_db_uri(new_db_file)))

            for model_name, _ in migration_mapping.iteritems():
                if model_name == 'ApplicationData':
                    continue

                if migration_script.model_from[
                        model_name] is not None and migration_script.model_to[
                            model_name] is not None:
                    count = store_verify.find(
                        migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" % \
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            GLSettings.print_msg(" * %s table migrated (entries count changed from %d to %d)" % \
                                                 (model_name, migration_script.entries_count[model_name], count))
                    else:
                        GLSettings.print_msg(" * %s table migrated (%d entry(s))" % \
                                             (model_name, migration_script.entries_count[model_name]))

            version += 1

            store_verify.close()

        perform_data_update(new_db_file)
    except Exception as exception:
        # simply propagate the exception
        raise exception

    else:
        # On success, first copy the migrated db into place, then delete the original db file as the last step
        shutil.copy(new_db_file, final_db_file)
        security.overwrite_and_remove(orig_db_file)

    finally:
        # Always clean up the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            tmp_db_file = os.path.join(tmpdir, f)
            security.overwrite_and_remove(tmp_db_file)
        shutil.rmtree(tmpdir)
Example 9
def perform_schema_migration(version):
    """
    @param version: the schema version of the existing database to migrate from
    @return: None
    """
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        GLSettings.print_msg("Migrations from DB version lower than %d are no longer supported!" % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    tmpdir =  os.path.abspath(os.path.join(GLSettings.db_path, 'tmp'))
    orig_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % version))
    final_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))

    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy2(orig_db_file, tmpdir)

    new_db_file = None

    try:
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % version))
            new_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % (version + 1)))

            GLSettings.db_file = new_db_file
            GLSettings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            GLSettings.print_msg("Updating DB from version %d to version %d" % (version, version + 1))

            store_old = Store(create_database('sqlite:' + old_db_file))
            store_new = Store(create_database('sqlite:' + new_db_file))

            # Instantiate the migration script for this version step
            MigrationModule = importlib.import_module("globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(migration_mapping, version, store_old, store_new)

            GLSettings.print_msg("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    GLSettings.print_msg("Failure while executing migration prologue: %s" % exception)
                    raise exception

                for model_name, _ in migration_mapping.iteritems():
                    if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            GLSettings.print_msg("Failure while migrating table %s: %s " % (model_name, exception))
                            raise exception
                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    GLSettings.print_msg("Failure while executing migration epilogue: %s " % exception)
                    raise exception

            finally:
                # The stores must always be closed before moving on,
                # otherwise SQLite journal files are left behind.
                migration_script.close()

            GLSettings.print_msg("Migration stats:")

            # Open the new database with a fresh store to verify the integrity of the generated file
            store_verify = Store(create_database(GLSettings.make_db_uri(new_db_file)))

            for model_name, _ in migration_mapping.iteritems():
                if model_name == 'ApplicationData':
                    continue

                if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                    count = store_verify.find(migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" % \
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            GLSettings.print_msg(" * %s table migrated (entries count changed from %d to %d)" % \
                                                 (model_name, migration_script.entries_count[model_name], count))
                    else:
                        GLSettings.print_msg(" * %s table migrated (%d entry(s))" % \
                                             (model_name, migration_script.entries_count[model_name]))

            version += 1

            store_verify.close()

        perform_data_update(new_db_file)
    except Exception as exception:
        GLSettings.print_msg("[FATAL]: ", exception)
        raise exception

    else:
        # On success, first copy the migrated db into place, then delete the original db file as the last step
        shutil.copy(new_db_file, final_db_file)
        security.overwrite_and_remove(orig_db_file)

    finally:
        # Always clean up the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            tmp_db_file = os.path.join(tmpdir, f)
            security.overwrite_and_remove(tmp_db_file)
        shutil.rmtree(tmpdir)
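Both migration listings above follow the same stepwise control flow: for each version bump, open one Store on the old database file and one on the new file, import the matching update module, commit after every table so a failure can be pinpointed, and finally re-open the new file with a fresh Store for a count-based integrity check. The sketch below compresses a single step; run_step is hypothetical glue, the module naming follows the examples, and migration_mapping is the mapping used above.

# Illustrative compression of one migration step from the listings above.
# run_step is a hypothetical helper, not part of the original code.
import importlib

def run_step(store_old, store_new, version):
    module = importlib.import_module("globaleaks.db.migrations.update_%d" % (version + 1))
    script = module.MigrationScript(migration_mapping, version, store_old, store_new)
    try:
        script.prologue()
        for model_name in migration_mapping:
            if script.model_from[model_name] is not None and script.model_to[model_name] is not None:
                script.migrate_model(model_name)
                script.commit()      # per-table commit: a failure points at a single table
        script.epilogue()
        script.commit()
    finally:
        script.close()               # always close both stores to avoid leaking journal files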
Example 10
class TSUserAgent(TSLocalAgent):
    agentId = userAgentId

    uuid = userAgentUUID
    agentType = userAgentType

    def __init__(self, server, connString):
        TSLocalAgent.__init__(self, server)

        self.client.getId()

        self.logger = logging.getLogger('UserAgent')

        self.rootAgent = server.localAgents[0]

        self.agentUsers = {}

        self.authServices = {'local': LocalAuth()}

        self.database = create_database(connString)
        self.dbStore = Store(self.database)

        self.server.listenerFlows.append(
            Flow(dstAgentId=userAgentId, command='authUser'))

    @TSMethodImpl(UserAgent.authUser)
    def authUser(self, context, **kw):
        @inlineCallbacks
        def implementation(context, userName, userPassword):
            userSet = yield self.dbStore.find(User, User.name == str(userName))
            user = yield userSet.one()

            self.logger.info('Authorizing user %s', userName)

            if user is None:
                self.logger.warning('Error authorizing user: no such user: %s',
                                    userName)
                raise UserAuthError('No such user: %s' % userName)

            authMethod = self.authServices[user.authService]

            if authMethod.authentificate(user, userPassword):
                agentId = context.client.getId()
                self.agentUsers[agentId] = user.id

                roles = yield user.roles
                role = self._setupRoles(context.client, roles)
            else:
                # Without this branch `role` would be unbound below whenever
                # the password check fails.
                self.logger.warning('Error authorizing user: invalid password for user: %s',
                                    userName)
                raise UserAuthError('Invalid password for user: %s' % userName)

            userDescr = TSUserDescriptor()
            userDescr.name = user.gecosName
            userDescr.role = role

            returnValue(userDescr)

        return implementation(context, **kw)

    def _setupRoles(self, client, roles):
        # First pass - identify maximum role
        maxRole = TSServerClient.AUTH_NONE

        for role in roles:
            if role.role == 'admin':
                maxRole = TSServerClient.AUTH_ADMIN
            elif role.role == 'operator' and maxRole != TSServerClient.AUTH_ADMIN:
                maxRole = TSServerClient.AUTH_OPERATOR
            else:
                maxRole = TSServerClient.AUTH_USER

        client.authorize(maxRole)

        if maxRole != TSServerClient.AUTH_ADMIN:
            # TODO: For user/operator need to set ACLs
            pass

        return maxRole

    def onDisconnect(self):
        self.dbStore.close()
Example 11
class TSUserAgent(TSLocalAgent):
    agentId = userAgentId
    
    uuid = userAgentUUID
    agentType = userAgentType
    
    def __init__(self, server, connString):
        TSLocalAgent.__init__(self, server)
        
        self.client.getId()
        
        self.logger = logging.getLogger('UserAgent')
        
        self.rootAgent = server.localAgents[0]
        
        self.agentUsers = {}
        
        self.authServices = {'local': LocalAuth()}
        
        self.database = create_database(connString)
        self.dbStore = Store(self.database)
        
        self.server.listenerFlows.append(Flow(dstAgentId = userAgentId, 
                                              command = 'authUser'))
    
    @TSMethodImpl(UserAgent.authUser)
    def authUser(self, context, **kw):
        @inlineCallbacks
        def implementation(context, userName, userPassword):
            userSet = yield self.dbStore.find(User, User.name == str(userName))
            user = yield userSet.one()
            
            self.logger.info('Authorizing user %s', userName)
            
            if user is None:
                self.logger.warning('Error authorizing user: no such user: %s', userName)
                raise UserAuthError('No such user: %s' % userName)
            
            authMethod = self.authServices[user.authService]
            
            if authMethod.authentificate(user, userPassword):
                agentId = context.client.getId()
                self.agentUsers[agentId] = user.id

                roles = yield user.roles
                role = self._setupRoles(context.client, roles)
            else:
                # Without this branch `role` would be unbound below whenever the password check fails.
                self.logger.warning('Error authorizing user: invalid password for user: %s', userName)
                raise UserAuthError('Invalid password for user: %s' % userName)
            
            userDescr = TSUserDescriptor()
            userDescr.name = user.gecosName
            userDescr.role = role
            
            returnValue(userDescr)
        
        return implementation(context, **kw)
    
    def _setupRoles(self, client, roles):
        # First pass - identify maximum role
        maxRole = TSServerClient.AUTH_NONE
        
        for role in roles:
            if role.role == 'admin':
                maxRole = TSServerClient.AUTH_ADMIN
            elif role.role == 'operator' and maxRole != TSServerClient.AUTH_ADMIN:
                maxRole = TSServerClient.AUTH_OPERATOR
            else:
                maxRole = TSServerClient.AUTH_USER
            
        
        client.authorize(maxRole)
        
        if maxRole != TSServerClient.AUTH_ADMIN:
            # TODO: For user/operator need to set ACLs
            pass
        
        return maxRole
    
    def onDisconnect(self):
        self.dbStore.close()
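A note on the yield expressions in authUser: Storm's find() and one() are synchronous, so those yields return plain values (inlineCallbacks simply passes non-Deferred results through). When a blocking query must be kept off the reactor thread, a common Twisted pattern is to run it in a worker thread with its own Store, because a Store must not be shared between threads. The sketch below is a hedged illustration that assumes the User model from the examples above; the helper names are not part of the original code.

# Illustrative only: run a blocking Storm query in Twisted's thread pool.
# The Store is created and closed inside the worker thread, and plain data
# (not a live Storm object) is returned, so nothing outlives the store.
from twisted.internet.threads import deferToThread
from storm.locals import create_database, Store

def _lookupGecosName(connString, userName):
    store = Store(create_database(connString))
    try:
        user = store.find(User, User.name == userName).one()
        return None if user is None else user.gecosName
    finally:
        store.close()

def lookupGecosNameAsync(connString, userName):
    # Returns a Deferred that fires with the user's gecosName, or None.
    return deferToThread(_lookupGecosName, connString, userName)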