コード例 #1
0
    def test_version_change_success(self):
        """After a successful data update, the stored 'version' equals __version__."""
        migration.perform_data_update(self.db_file)

        check_store = Store(create_database(GLSettings.db_uri))
        private_cfg = config.PrivateFactory(check_store)
        self.assertEqual(private_cfg.get_val(u'version'), __version__)
        check_store.close()
コード例 #2
0
    def test_check_unmodifiable_strings(self):
        """Verify the data migration restores unmodifiable l10n template strings."""
        self._initStartDB(34)

        # Blank out a template that the migration is expected to restore.
        templates = NotificationL10NFactory(self.store)
        original_tmpl = templates.get_val("export_template", "it")
        templates.set_val("export_template", "it", "")
        self.assertEqual(templates.get_val("export_template", "it"), "")
        self.store.commit()

        # place a dummy version in the current db
        seed_store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(seed_store)
        self.dummy_ver = "2.XX.XX"
        prv.set_val("version", self.dummy_ver)
        self.assertEqual(prv.get_val("version"), self.dummy_ver)
        seed_store.commit()
        seed_store.close()

        migration.perform_data_update(self.db_file)

        # The blanked template must be back to its original content.
        check_store = Store(create_database(GLSettings.db_uri))
        migrated_tmpl = NotificationL10NFactory(check_store).get_val("export_template", "it")
        self.assertEqual(migrated_tmpl, original_tmpl)
        check_store.commit()
        check_store.close()

        shutil.rmtree(GLSettings.db_path)
コード例 #3
0
    def test_mig_37_valid_tor_hs_key(self):
        """Migration 37 must import an existing Tor HS private key and hostname.

        Patches update_37.TOR_DIR to point at the test db dir; the patch is
        restored in a finally block (the original leaked the patched value to
        later tests whenever an assertion failed).
        """
        self._initStartDB(36)

        from globaleaks.db.migrations import update_37
        orig_tor_dir = update_37.TOR_DIR
        update_37.TOR_DIR = GLSettings.db_path

        try:
            pk_path = os.path.join(update_37.TOR_DIR, 'private_key')
            hn_path = os.path.join(update_37.TOR_DIR, 'hostname')

            shutil.copy(os.path.join(helpers.DATA_DIR, 'tor/private_key'), pk_path)
            shutil.copy(os.path.join(helpers.DATA_DIR, 'tor/hostname'), hn_path)

            ret = update_db()
            self.assertEqual(ret, None)

            new_uri = GLSettings.make_db_uri(
                os.path.join(GLSettings.db_path, GLSettings.db_file_name))
            store = Store(create_database(new_uri))
            try:
                hs = config.NodeFactory(store).get_val('onionservice')
                pk = config.PrivateFactory(store).get_val('tor_onion_key')

                self.assertEqual('lftx7dbyvlc5txtl.onion', hs)
                with open(os.path.join(helpers.DATA_DIR,
                                       'tor/ephemeral_service_key')) as f:
                    saved_key = f.read().strip()
                self.assertEqual(saved_key, pk)
            finally:
                store.close()

            shutil.rmtree(GLSettings.db_path)
        finally:
            # Always undo the module-level monkeypatch.
            update_37.TOR_DIR = orig_tor_dir
コード例 #4
0
 def postconditions_34(self):
     """After migration, the 'unmodifiable' marker must have been replaced."""
     store = Store(create_database(GLSettings.db_uri))
     template = NotificationL10NFactory(store).get_val(u'export_template', u'it')
     self.assertNotEqual(template, 'unmodifiable')
     store.commit()
     store.close()
コード例 #5
0
    def setUp(self):
        """Stage a populated test DB in a ramdisk dir and stamp a dummy version."""
        helpers.init_glsettings_for_unit_tests()

        # Recreate a clean scratch directory holding the populated test DB.
        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        shutil.rmtree(GLSettings.db_path, True)
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % DATABASE_VERSION
        src_db = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                              'db', 'populated', db_name)
        shutil.copyfile(src_db, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        # place a dummy version in the current db
        seed_store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(seed_store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val(u'version', self.dummy_ver)
        self.assertEqual(prv.get_val(u'version'), self.dummy_ver)
        seed_store.commit()
        seed_store.close()

        # backup various mocks that we will use
        self._bck_f = config.is_cfg_valid
        GLConfig['private']['xx_smtp_password'] = GLConfig['private'].pop(
            'smtp_password')
        self.dp = u'yes_you_really_should_change_me'
コード例 #6
0
ファイル: test_migration.py プロジェクト: Taipo/GlobaLeaks
    def test_version_change_success(self):
        """A successful data update stamps the DB with the current version."""
        migration.perform_data_update(self.db_file)

        verify_store = Store(create_database(GLSettings.db_uri))
        self.assertEqual(config.PrivateFactory(verify_store).get_val('version'),
                         __version__)
        verify_store.close()
コード例 #7
0
    def test_check_unmodifiable_strings(self):
        """The data update must restore unmodifiable l10n template strings."""
        self._initStartDB(34)

        # Wipe a template that must come back after the migration.
        templates = NotificationL10NFactory(self.store)
        expected = templates.get_val('export_template', 'it')
        templates.set_val('export_template', 'it', '')
        self.assertEqual(templates.get_val('export_template', 'it'), '')
        self.store.commit()

        # place a dummy version in the current db
        seed = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(seed)
        self.dummy_ver = '2.XX.XX'
        prv.set_val('version', self.dummy_ver)
        self.assertEqual(prv.get_val('version'), self.dummy_ver)
        seed.commit()
        seed.close()

        migration.perform_data_update(self.db_file)

        check = Store(create_database(GLSettings.db_uri))
        restored = NotificationL10NFactory(check).get_val('export_template', 'it')
        self.assertEqual(restored, expected)
        check.commit()
        check.close()

        shutil.rmtree(GLSettings.db_path)
コード例 #8
0
ファイル: test_migration.py プロジェクト: Taipo/GlobaLeaks
    def setUp(self):
        """Stage a populated test DB in a ramdisk dir and stamp a dummy version."""
        helpers.init_glsettings_for_unit_tests()

        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        # Bug fix: remove any leftover directory from a previous run first;
        # os.mkdir() raises OSError if the path already exists. The sibling
        # variant of this setUp already performs this cleanup.
        shutil.rmtree(GLSettings.db_path, True)
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % DATABASE_VERSION
        db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'db', 'populated', db_name)
        shutil.copyfile(db_path, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val('version', self.dummy_ver)
        self.assertEqual(prv.get_val('version'), self.dummy_ver)
        store.commit()
        store.close()

        # backup various mocks that we will use
        self._bck_f = config.is_cfg_valid
        GLConfig['private']['xx_smtp_password'] = GLConfig['private'].pop('smtp_password')
        self.dp = u'yes_you_really_should_change_me'
コード例 #9
0
ファイル: test_migration.py プロジェクト: Taipo/GlobaLeaks
    def test_migration_error_with_removed_language(self):
        """Data update must fail when the DB enables an unsupported language."""
        seed_store = Store(create_database(GLSettings.db_uri))
        seed_store.add(EnabledLanguage('zyx'))
        seed_store.commit()
        seed_store.close()

        self.assertRaises(Exception, migration.perform_data_update, self.db_file)
コード例 #10
0
 def preconditions_34(self):
     """Seed the start DB so its 'export_template' l10n string is 'unmodifiable'."""
     store = Store(create_database(self.start_db_uri))
     notification_l10n = NotificationL10NFactory(store)
     notification_l10n.set_val(u'export_template', u'it', 'unmodifiable')
     x = notification_l10n.get_val(u'export_template', u'it')
     # Bug fix: assertTrue(x, 'unmodifiable') treated the expected value as a
     # failure *message* and only checked truthiness; compare for equality.
     self.assertEqual(x, 'unmodifiable')
     store.commit()
     store.close()
コード例 #11
0
    def test_migration_error_with_removed_language(self):
        """Enabling an unknown language must make the data update raise."""
        store = Store(create_database(GLSettings.db_uri))
        bogus_lang = EnabledLanguage('zyx')
        store.add(bogus_lang)
        store.commit()
        store.close()

        self.assertRaises(Exception,
                          migration.perform_data_update, self.db_file)
コード例 #12
0
    def test_ver_change_exception(self):
        """An IOError inside the managed update must propagate and roll back."""
        # Force is_cfg_valid to raise inside managed_ver_update.
        config.is_cfg_valid = apply_gen(throw_excep)

        self.assertRaises(IOError, migration.perform_data_update, self.db_file)

        check_store = Store(create_database(GLSettings.db_uri))
        self.assertEqual(config.PrivateFactory(check_store).get_val(u'version'),
                         self.dummy_ver)
        check_store.close()
コード例 #13
0
def perform_data_update(dbfile):
    """Run the data-update phase against *dbfile*.

    Commits on success, rolls back and re-raises on any failure; the
    store is closed in every case.
    """
    store = Store(create_database(GLSettings.make_db_uri(dbfile)))
    try:
        db_perform_data_update(store)
        store.commit()
    except:
        # deliberately bare: roll back on *any* exception, then propagate
        store.rollback()
        raise
    finally:
        store.close()
コード例 #14
0
ファイル: test_migration.py プロジェクト: Taipo/GlobaLeaks
    def test_ver_change_exception(self):
        """perform_data_update must propagate IOError and keep the old version."""
        # Explicity throw an exception in managed_ver_update via is_cfg_valid
        config.is_cfg_valid = apply_gen(throw_excep)

        self.assertRaises(IOError, migration.perform_data_update, self.db_file)

        verify = Store(create_database(GLSettings.db_uri))
        self.assertEqual(config.PrivateFactory(verify).get_val('version'),
                         self.dummy_ver)
        verify.close()
コード例 #15
0
ファイル: test_migration.py プロジェクト: Taipo/GlobaLeaks
    def test_version_change_not_ok(self):
        """A failed config validation must leave the dummy version in place."""
        # Make is_cfg_valid report an invalid config during the managed update.
        config.is_cfg_valid = apply_gen(mod_bool)

        self.assertRaises(Exception, migration.perform_data_update, self.db_file)

        # Ensure the rollback has succeeded
        rollback_store = Store(create_database(GLSettings.db_uri))
        self.assertEqual(config.PrivateFactory(rollback_store).get_val('version'),
                         self.dummy_ver)
        rollback_store.close()
コード例 #16
0
    def test_version_change_not_ok(self):
        """When config validation fails, the update must roll back 'version'."""
        # Set is_config_valid to false  during managed ver update
        config.is_cfg_valid = apply_gen(mod_bool)

        self.assertRaises(Exception,
                          migration.perform_data_update, self.db_file)

        # Ensure the rollback has succeeded
        check = Store(create_database(GLSettings.db_uri))
        prv_factory = config.PrivateFactory(check)
        self.assertEqual(prv_factory.get_val(u'version'), self.dummy_ver)
        check.close()
コード例 #17
0
    def postconditions_36(self):
        """Check migration 37 imported the Tor onion key and hostname into the DB."""
        db_uri = GLSettings.make_db_uri(
            os.path.join(GLSettings.db_path, GLSettings.db_file_name))
        store = Store(create_database(db_uri))
        onion_addr = config.NodeFactory(store).get_val(u'onionservice')
        onion_key = config.PrivateFactory(store).get_val(u'tor_onion_key')

        self.assertEqual('lftx7dbyvlc5txtl.onion', onion_addr)
        key_file = os.path.join(helpers.DATA_DIR, 'tor/ephemeral_service_key')
        with open(key_file) as f:
            expected_key = f.read().strip()
        self.assertEqual(expected_key, onion_key)
        store.close()
コード例 #18
0
ファイル: test_migration.py プロジェクト: sshyran/GlobaLeaks
    def test_ver_change_exception(self):
        """An IOError raised inside the managed update must propagate."""
        # Explicity throw an exception in managed_ver_update via is_cfg_valid
        config.is_cfg_valid = apply_gen(throw_excep)

        try:
            yield migration.perform_data_update(self.db_file)
            self.fail()
        except IOError as err:
            self.assertIsInstance(err, IOError)

        check = Store(create_database(GLSettings.db_uri))
        self.assertEqual(config.PrivateFactory(check).get_val('version'),
                         self.dummy_ver)
        check.close()
コード例 #19
0
    def test_detect_and_fix_cfg_change(self):
        """An invalid config must be detected and repaired by the data update."""
        before = Store(create_database(GLSettings.db_uri))
        self.assertFalse(config.is_cfg_valid(before))
        before.close()

        migration.perform_data_update(self.db_file)

        after = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(after)
        self.assertEqual(prv.get_val(u'version'), __version__)
        self.assertEqual(prv.get_val(u'xx_smtp_password'), self.dp)
        self.assertTrue(config.is_cfg_valid(after))
        after.close()
コード例 #20
0
ファイル: test_migration.py プロジェクト: Taipo/GlobaLeaks
    def test_detect_and_fix_cfg_change(self):
        """perform_data_update must repair a config detected as invalid."""
        pre_store = Store(create_database(GLSettings.db_uri))
        was_valid = config.is_cfg_valid(pre_store)
        self.assertFalse(was_valid)
        pre_store.close()

        migration.perform_data_update(self.db_file)

        post_store = Store(create_database(GLSettings.db_uri))
        private_cfg = config.PrivateFactory(post_store)
        self.assertEqual(private_cfg.get_val('version'), __version__)
        self.assertEqual(private_cfg.get_val('xx_smtp_password'), self.dp)
        self.assertTrue(config.is_cfg_valid(post_store))
        post_store.close()
コード例 #21
0
ファイル: test_migration.py プロジェクト: sshyran/GlobaLeaks
    def test_version_change_not_ok(self):
        """A DatabaseIntegrityError during the update must trigger a rollback."""
        # Set is_config_valid to false  during managed ver update
        config.is_cfg_valid = apply_gen(mod_bool)

        try:
            yield migration.perform_data_update(self.db_file)
            self.fail()
        except DatabaseIntegrityError as err:
            self.assertIsInstance(err, DatabaseIntegrityError)

        # Ensure the rollback has succeeded
        check = Store(create_database(GLSettings.db_uri))
        self.assertEqual(config.PrivateFactory(check).get_val('version'),
                         self.dummy_ver)
        check.close()
コード例 #22
0
ファイル: tables.py プロジェクト: Shadiesna/ooni-probe
def createTable(model, transactor, database):
    """
    Create the table for the specified model.
    Specification of a transactor and database is useful in unittesting.
    """
    if not transactor:
        from oonib.db import transactor
    if not database:
        from oonib.db import database
    store = Store(database)
    create_query = generateCreateQuery(model)
    try:
        store.execute(create_query)
    # XXX trap the specific error that is raised when the table exists
    except StormError, e:
        print "Failed to create table!"
        print e
        store.close()
コード例 #23
0
ファイル: tables.py プロジェクト: Shadiesna/ooni-probe
def createTable(model, transactor, database):
    """
    Create the table for the specified model.
    Specification of a transactor and database is useful in unittesting.
    """
    if not transactor:
        from oonib.db import transactor
    if not database:
        from oonib.db import database
    store = Store(database)
    create_query = generateCreateQuery(model)
    try:
        store.execute(create_query)
    # XXX trap the specific error that is raised when the table exists
    except StormError, e:
        print "Failed to create table!"
        print e
        store.close()
コード例 #24
0
class Controller:
    """Owns the application's sqlite-backed Storm store."""

    def __init__(self):
        self.DATABASE = None
        self.store = None
        self.openConnection()

    def openConnection(self):
        """(Re)open the database and bind a fresh store to it."""
        # Bug fix: the URI was 'sqlite: mydata.db'; the stray space after the
        # scheme makes Storm open a file literally named ' mydata.db'.
        self.DATABASE = create_database('sqlite:mydata.db')
        self.store = Store(self.DATABASE)

    def closeConnection(self):
        """Flush pending changes and close the store."""
        self.store.commit()
        self.store.close()
コード例 #25
0
ファイル: tables.py プロジェクト: Afridocs/GLBackend
def createTable(model, transactor, database):
    """
    Create the table for the specified model.
    It will default to using globaleaks.db transactor and database if none is
    specified.
    Specification of a transactor and database is useful in unittesting.
    """
    if not transactor:
        from globaleaks.db import transactor
    if not database:
        from globaleaks.db import database
    store = Store(database)
    create_query = generateCreateQuery(model)

    try:
        store.execute(create_query)
    # XXX trap the specific error that is raised when the table exists
    # seem to be OperationalError raised, but not a specific error exists.
    except StormError, e:
        print "Failed to create table!", e
        store.close()
コード例 #26
0
def perform_schema_migration(version):
    """
    Migrate the database schema from *version* up to DATABASE_VERSION.

    Every step runs on throwaway copies inside <db_path>/tmp so a failure
    leaves the original database untouched; on success the migrated file
    replaces the original, which is then securely removed.

    @param version: schema version of the database being migrated
    @return: None
    """
    # NOTE(review): these two lists are filled below but never read; the
    # `finally` cleanup wipes every temporary file regardless of outcome.
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        GLSettings.print_msg("Migrations from DB version lower than %d are no longer supported!" % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    tmpdir = os.path.abspath(os.path.join(GLSettings.db_path, 'tmp'))
    orig_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % version))
    final_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))

    # Work on a scratch copy: a failed migration must never corrupt the original.
    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy2(orig_db_file, tmpdir)

    new_db_file = None

    try:
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % version))
            new_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % (version + 1)))

            GLSettings.db_file = new_db_file
            GLSettings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            GLSettings.print_msg("Updating DB from version %d to version %d" % (version, version + 1))

            store_old = Store(create_database('sqlite:' + old_db_file))
            store_new = Store(create_database('sqlite:' + new_db_file))

            # Here is instanced the migration script
            MigrationModule = importlib.import_module("globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(migration_mapping, version, store_old, store_new)

            GLSettings.print_msg("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    GLSettings.print_msg("Failure while executing migration prologue: %s" % exception)
                    # plain `raise` preserves the original traceback on Python 2
                    raise

                for model_name, _ in migration_mapping.iteritems():
                    if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            GLSettings.print_msg("Failure while migrating table %s: %s " % (model_name, exception))
                            raise
                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    GLSettings.print_msg("Failure while executing migration epilogue: %s " % exception)
                    raise

            finally:
                # the database should be always closed before leaving the application
                # in order to not keep leaking journal files.
                migration_script.close()

            GLSettings.print_msg("Migration stats:")

            # we open a new db in order to verify integrity of the generated file
            store_verify = Store(create_database(GLSettings.make_db_uri(new_db_file)))

            for model_name, _ in migration_mapping.iteritems():
                if model_name == 'ApplicationData':
                    continue

                if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                    count = store_verify.find(migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" % \
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            GLSettings.print_msg(" * %s table migrated (entries count changed from %d to %d)" % \
                                                 (model_name, migration_script.entries_count[model_name], count))
                    else:
                        GLSettings.print_msg(" * %s table migrated (%d entry(s))" % \
                                             (model_name, migration_script.entries_count[model_name]))

            version += 1

            store_verify.close()

        perform_data_update(new_db_file)
    except Exception:
        # Bug fix: `raise exception` discarded the original traceback on
        # Python 2; a bare raise propagates it intact.
        raise

    else:
        # in case of success first copy the new migrated db, then as last action delete the original db file
        shutil.copy(new_db_file, final_db_file)
        security.overwrite_and_remove(orig_db_file)

    finally:
        # Always cleanup the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            tmp_db_file = os.path.join(tmpdir, f)
            security.overwrite_and_remove(tmp_db_file)
        shutil.rmtree(tmpdir)
コード例 #27
0
 def close_connection(self):
     """Close the thread-local store and mark this connection as closed."""
     # NOTE(review): close() is invoked unbound on the Store class with the
     # thread-local store as `self`; equivalent to tlocal.store.close() if
     # tlocal.store is a plain Store -- confirm before simplifying.
     Store.close(tlocal.store)
     self.isClosed = True
コード例 #28
0
class TableReplacer:
    """
    This is the base class used by every Updater
    """
    def __init__(self, old_db_file, new_db_file, start_ver):

        from globaleaks.db.update_5_6 import User_version_5, Comment_version_5, Node_version_5
        from globaleaks.db.update_6_7 import Node_version_6, Context_version_6
        from globaleaks.db.update_7_8 import Node_version_7, Notification_version_7, Context_version_7, \
            Receiver_version_7, InternalFile_version_7
        from globaleaks.db.update_8_9 import Context_version_8, Receiver_version_8, Notification_version_8
        from globaleaks.db.update_9_10 import Node_version_9, ApplicationData_version_10, \
            Receiver_version_9, User_version_9
        from globaleaks.db.update_10_11 import InternalTip_version_10, InternalFile_version_10
        from globaleaks.db.update_11_12 import Node_version_11, ApplicationData_version_11, Context_version_11

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        self.table_history = {
            'Node': [
                Node_version_5, Node_version_6, Node_version_7, Node_version_9,
                None, Node_version_11, None, models.Node
            ],
            'User': [
                User_version_5, User_version_9, None, None, None, models.User,
                None, None
            ],
            'Context': [
                Context_version_6, None, Context_version_7, Context_version_8,
                Context_version_11, None, None, models.Context
            ],
            'Receiver': [
                Receiver_version_7, None, None, Receiver_version_8,
                Receiver_version_9, models.Receiver, None, None
            ],
            'ReceiverFile':
            [models.ReceiverFile, None, None, None, None, None, None, None],
            'Notification': [
                Notification_version_7, None, None, Notification_version_8,
                models.Notification, None, None, None
            ],
            'Comment': [
                Comment_version_5, models.Comment, None, None, None, None,
                None, None
            ],
            'InternalTip': [
                InternalTip_version_10, None, None, None, None, None,
                models.InternalTip, None
            ],
            'InternalFile': [
                InternalFile_version_7, None, None, InternalFile_version_10,
                None, None, models.InternalFile, None
            ],
            'WhistleblowerTip': [
                models.WhistleblowerTip, None, None, None, None, None, None,
                None
            ],
            'ReceiverTip':
            [models.ReceiverTip, None, None, None, None, None, None, None],
            'ReceiverInternalTip': [
                models.ReceiverInternalTip, None, None, None, None, None, None,
                None
            ],
            'ReceiverContext':
            [models.ReceiverContext, None, None, None, None, None, None, None],
            'Message':
            [models.Message, None, None, None, None, None, None, None],
            'Stats': [models.Stats, None, None, None, None, None, None, None],
            'ApplicationData': [
                ApplicationData_version_10, None, None, None, None, None, None,
                models.ApplicationData
            ],
        }

        for k, v in self.table_history.iteritems():
            # +1 because count start from 0,
            # -5 because the relase 0,1,2,3,4 are not supported anymore
            assert len(v) == (DATABASE_VERSION + 1 - 5), \
                "I'm expecting a table with %d statuses (%s)" % (DATABASE_VERSION, k)

        print "%s Opening old DB: %s" % (self.debug_info, old_db_file)
        old_database = create_database("sqlite:%s" % self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database("sqlite:%s" % new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:

            print "%s Acquire SQL schema %s" % (self.debug_info,
                                                GLSetting.db_schema_file)

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                print "Unable to access %s" % GLSetting.db_schema_file
                raise Exception("Unable to access db schema file")

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f.readlines()).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        print "OperationalError in [%s]" % create_query

            self.store_new.commit()
            return
            # return here and manage the migrant versions here:

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                print "%s OperationalError in [%s]" % (self.debug_info,
                                                       create_query)
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        pass

    def epilogue(self):
        pass

    def get_right_model(self, table_name, version):

        table_index = (version - 5)

        if not self.table_history.has_key(table_name):
            print "Not implemented usage of get_right_model %s (%s %d)" % (
                __file__, table_name, self.start_ver)
            raise NotImplementedError

        assert version <= DATABASE_VERSION, "wrong developer brainsync"

        if self.table_history[table_name][table_index]:
            # print "Immediate return %s = %s at version %d" % \
            #       ( table_name, self.table_history[table_name][table_index], version )
            return self.table_history[table_name][table_index]

        # else, it's none, and we've to take the previous valid version
        #
        # print "Requested version %d of %s need to be collected in the past" %\
        #       (version, table_name)

        while version >= 0:
            if self.table_history[table_name][table_index]:
                # print ".. returning %s = %s" %\
                #           ( table_name, self.table_history[table_name][table_index] )
                return self.table_history[table_name][table_index]
            table_index -= 1

        # This never want happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name:
        @param version:
        @return:
            The SQL right for the stuff we've
        """

        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):

        print "%s default %s migration assistant: #%d" % (
            self.debug_info, table_name,
            self.store_old.find(
                self.get_right_model(table_name, self.start_ver)).count())

        old_objects = self.store_old.find(
            self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # Storm internals simply reversed
            for k, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        print "%s default %s migration assistant" % (self.debug_info,
                                                     table_name)

        old_obj = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # Storm internals simply reversed
        for k, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        has been created between 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        has been created between 9 and 10!
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        has been created between 9 and 10!
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("ApplicationData")
コード例 #29
0
ファイル: base_updater.py プロジェクト: RuanAragao/GlobaLeaks
class TableReplacer(object):
    """
    This is the base class used by every Updater.

    It opens the old sqlite database, creates the new one (loading either
    the final on-disk schema or the generated per-version schema), and
    offers generic row-copy helpers plus one migrate_* hook per table.
    """

    def __init__(self, old_db_file, new_db_file, start_ver):
        from globaleaks.db.update_8_9 import Context_v_8, Receiver_v_8, Notification_v_8
        from globaleaks.db.update_9_10 import Node_v_9, Receiver_v_9, User_v_9
        from globaleaks.db.update_10_11 import InternalTip_v_10, InternalFile_v_10
        from globaleaks.db.update_11_12 import Node_v_11, Context_v_11
        from globaleaks.db.update_12_13 import Node_v_12, Context_v_12
        from globaleaks.db.update_13_14 import Node_v_13, Context_v_13
        from globaleaks.db.update_14_15 import Node_v_14, User_v_14, Context_v_14, Receiver_v_14, \
            InternalTip_v_14, Notification_v_14, Stats_v_14, Comment_v_14
        from globaleaks.db.update_15_16 import Receiver_v_15, Notification_v_15
        from globaleaks.db.update_16_17 import Node_v_16, Receiver_v_16, Notification_v_16, Stats_v_16
        from globaleaks.db.update_17_18 import Node_v_17
        from globaleaks.db.update_18_19 import Node_v_18
        from globaleaks.db.update_19_20 import Node_v_19, Notification_v_19, Comment_v_19, Message_v_19, \
            InternalTip_v_19, ReceiverTip_v_19, InternalFile_v_19, ReceiverFile_v_19, Receiver_v_19, \
            Context_v_19

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        # One model class per supported DB version (8..DATABASE_VERSION) for
        # each table; a None entry means "unchanged since the previous
        # version" and is resolved by get_right_model() walking backwards.
        self.table_history = {
            'Node': [Node_v_9, None, Node_v_11, None, Node_v_12, Node_v_13, Node_v_14, Node_v_16, None, Node_v_17,
                     Node_v_18, Node_v_19, models.Node],
            'User': [User_v_9, None, User_v_14, None, None, None, None, models.User, None, None, None, None, None],
            'Context': [Context_v_8, Context_v_11, None, None, Context_v_12, Context_v_13, Context_v_14, Context_v_19,
                        None, None, None, None, models.Context],
            'Receiver': [Receiver_v_8, Receiver_v_9, Receiver_v_14, None, None, None, None, Receiver_v_15,
                         Receiver_v_16, Receiver_v_19, None, None, models.Receiver],
            'ReceiverFile': [ReceiverFile_v_19, None, None, None, None, None, None, None, None, None, None, None,
                             models.ReceiverFile],
            'Notification': [Notification_v_8, Notification_v_14, None, None, None, None, None, Notification_v_15,
                             Notification_v_16, Notification_v_19, None, None, models.Notification],
            'Comment': [Comment_v_14, None, None, None, None, None, None, Comment_v_19, None, None, None, None,
                        models.Comment],
            'InternalTip': [InternalTip_v_10, None, None, InternalTip_v_14, None, None, None, InternalTip_v_19, None,
                            None, None, None, models.InternalTip],
            'InternalFile': [InternalFile_v_10, None, None, InternalFile_v_19, None, None, None, None, None, None, None,
                             None, models.InternalFile],
            'WhistleblowerTip': [models.WhistleblowerTip, None, None, None, None, None, None, None, None, None, None,
                                 None, None],
            'ReceiverTip': [ReceiverTip_v_19, None, None, None, None, None, None, None, None, None, None, None,
                            models.ReceiverTip],
            'ReceiverInternalTip': [models.ReceiverInternalTip, None, None, None, None, None, None, None, None, None,
                                    None, None, None],
            'ReceiverContext': [models.ReceiverContext, None, None, None, None, None, None, None, None, None, None,
                                None, None],
            'Message': [Message_v_19, None, None, None, None, None, None, None, None, None, None, None, models.Message],
            'Stats': [Stats_v_14, None, None, None, None, None, None, Stats_v_16, None, models.Stats, None, None, None],
            'ApplicationData': [models.ApplicationData, None, None, None, None, None, None, None, None, None, None,
                                None, None],
            'Field': [models.Field, None, None, None, None, None, None, None, None, None, None, None, None],
            'FieldOption': [models.FieldOption, None, None, None, None, None, None, None, None, None, None, None, None],
            'FieldField': [models.FieldField, None, None, None, None, None, None, None, None, None, None, None, None],
            'Step': [models.Step, None, None, None, None, None, None, None, None, None, None, None, None],
            'StepField': [models.StepField, None, None, None, None, None, None, None, None, None, None, None, None],
            'Anomalies': [models.Anomalies, None, None, None, None, None, None, None, None, None, None, None, None],
            'EventLogs': [models.EventLogs, None, None, None, None, None, None, None, None, None, None, None, None],
        }

        for k, v in self.table_history.iteritems():
            # +1 because the count starts from 0,
            # -8 because releases before the 8th are not supported anymore
            length = DATABASE_VERSION + 1 - 8
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            # Final migration step: load the definitive SQL schema from disk.
            log.msg('{} Acquire SQL schema {}'.format(self.debug_info, GLSetting.db_schema_file))

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSetting.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        log.msg('OperationalError in "{}"'.format(create_query))
            self.store_new.commit()
            return
            # return here and manage the migrant versions here:

        # Intermediate step: generate the CREATE TABLE statements for the
        # target (start_ver + 1) version from the Storm models.
        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(self.debug_info, create_query))
                raise excep

        self.store_new.commit()

    def close(self):
        """Close both the old and the new database stores."""
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        """Hook executed before the migration; subclasses may override."""
        pass

    def epilogue(self):
        """Hook executed after the migration; subclasses may override."""
        pass

    def get_right_model(self, table_name, version):
        """
        Return the model class describing `table_name` at `version`.

        A None entry in the history list means the table is unchanged since
        the previous version, so we walk backwards to the most recent
        concrete model. Returns None if the table did not exist yet.
        """
        table_index = (version - 8)

        if table_name not in self.table_history:
            msg = 'Not implemented usage of get_right_model {} ({} {})'.format(
                __file__, table_name, self.start_ver)
            raise NotImplementedError(msg)

        if version > DATABASE_VERSION:
            raise ValueError('Version supplied must be less or equal to {}'.format(
                DATABASE_VERSION))

        if self.table_history[table_name][table_index]:
            return self.table_history[table_name][table_index]

        # else, it's None, and we have to take the previous valid version.
        # BUGFIX: the loop condition must be on table_index; the original
        # looped on `version` without ever decrementing it, so it never
        # terminated normally and crashed with an IndexError once
        # table_index wrapped past the start of the list.
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # the table does not exist at or before the requested version
        return None

    def get_right_sql_version(self, model_name, version):
        """
        Return the CREATE TABLE statement for `model_name` at `version`,
        or None when the table does not exist at that version.
        """
        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):
        """Copy every row of `table_name` from the old store to the new one."""
        models_count = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)
        ).count()
        log.msg('{} default {} migration assistant: #{}'.format(
            self.debug_info, table_name, models_count))

        old_objects = self.store_old.find(self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # Storm internals simply reversed: copy each mapped column by name
            for _, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        """Copy the single row of `table_name` from the old store to the new one."""
        log.msg('{} default {} migration assistant'.format(self.debug_info, table_name))

        old_obj = self.store_old.find(self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # Storm internals simply reversed: copy each mapped column by name
        for _, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        has been created between 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        has been created between 14 and 15
        and is not migrated since 17
        """
        if self.start_ver < 17:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        There is no need to migrate it the application data.
        Default application data is loaded by the application
        and stored onto the db at each new start.
        """
        return

    def migrate_Field(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Field")

    def migrate_FieldOption(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldOption")

    def migrate_FieldField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldField")

    def migrate_Step(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Step")

    def migrate_StepField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("StepField")

    def migrate_Anomalies(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Anomalies")

    def migrate_EventLogs(self):
        """
        has been created between 15 and 16!
        should be dropped before 20
        """
        if self.start_ver < 20:
            return

        self._perform_copy_list("EventLogs")
コード例 #30
0
class StormStorageBackend(StorageBackend):
    """Storage back-end based on the Storm ORM framework."""

    def __init__(self):
        self.store = None

    def set_config(self, **kwargs):
        """Set the configuration of this back-end.

        Expects the keyword arguments 'uri' (Storm database URI),
        'log_format' and 'log_level' (a logging level name, e.g. 'DEBUG').
        """
        uri = kwargs['uri']
        database = create_database(uri)
        self.store = Store(database)
        self.logger = logging.getLogger('StormStorageBackend')
        handler = logging.StreamHandler()
        formatter = logging.Formatter(kwargs['log_format'])
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
        # getattr() is the idiomatic way to resolve a level name such as
        # 'DEBUG' to its numeric value; the original used the obscure
        # logging.__getattribute__(...) for the same lookup.
        self.logger.setLevel(getattr(logging, kwargs['log_level']))

    def create_node(self, node, jid, node_config):
        """Create a PubSub node with the given configuration.

        Creates the Node, NodeConfig, Affiliation and Subscription model for
        the given node.
        """
        # Lazy %-style logger arguments avoid formatting when the level is off.
        self.logger.debug('Creating node %s for jid %s with config %s',
            node, jid, node_config)
        new_node = Node(node)
        self.store.add(new_node)
        # 'merged_config' (renamed from 'config') avoids shadowing a commonly
        # imported 'config' module name.
        merged_config = copy.deepcopy(DEFAULT_CONFIG)
        merged_config.update(node_config)
        for key, value in merged_config.items():
            new_node_config = NodeConfig(node, key, value)
            new_node_config.updated = datetime.utcnow()
            self.store.add(new_node_config)
        affiliation = Affiliation(node, jid, u'owner', datetime.utcnow())
        self.store.add(affiliation)
        subscription = Subscription(node, jid, jid, u'subscribed',
                datetime.utcnow())
        self.store.add(subscription)

    def create_channel(self, jid):
        """Create a channel for the given JID.

        Creates all the required PubSub nodes that constitute a channel, with
        the appropriate permissions.
        """
        self.logger.debug('Creating channel for %s', jid)
        creation_date = unicode(datetime.utcnow().isoformat())
        self.create_node(u'/user/%s/posts' % jid, jid,
            {u'channelType': u'personal',
                u'creationDate': creation_date,
                u'defaultAffiliation': u'publisher',
                u'description': u'buddycloud channel for %s' % jid,
                u'title': jid})
        self.create_node(u'/user/%s/geo/current' % jid, jid,
            {u'creationDate': creation_date,
                u'description': u'Where %s is at now' % jid,
                u'title': u'%s Current Location' % jid})
        self.create_node(u'/user/%s/geo/next' % jid, jid,
            {u'creationDate': creation_date,
                u'description': u'Where %s intends to go' % jid,
                u'title': u'%s Next Location' % jid})
        self.create_node(u'/user/%s/geo/previous' % jid, jid,
            {u'creationDate': creation_date,
                u'description': u'Where %s has been before' % jid,
                u'title': u'%s Previous Location' % jid})
        self.create_node(u'/user/%s/status' % jid, jid,
            {u'creationDate': creation_date,
                u'description': u'M000D',
                u'title': u'%s status updates' % jid})
        self.create_node(u'/user/%s/subscriptions' % jid, jid,
            {u'creationDate': creation_date,
                u'description': u'Browse my interests',
                u'title': u'%s subscriptions' % jid})
        self.store.commit()

    def get_node(self, node):
        """Get the requested PubSub node."""
        self.logger.debug('Getting node %s', node)
        the_node = self.store.get(Node, node)
        self.logger.debug('Returning node %s', the_node)
        return the_node

    def get_nodes(self):
        """Get a list of all the available PubSub nodes."""
        self.logger.debug('Getting list of available nodes.')
        node_list = self.store.find(Node)
        self.logger.debug('Returning list of available node %s', node_list)
        return node_list

    def add_item(self, node, item_id, item):
        """Add an item to the requested PubSub node."""
        new_item = Item(node, unicode(item_id), datetime.utcnow(), item)
        self.store.add(new_item)
        self.store.commit()

    def shutdown(self):
        """Shut down this storage module - flush, commit and close the
        store."""
        self.store.flush()
        self.store.commit()
        self.store.close()
コード例 #31
0
ファイル: db.py プロジェクト: scarpentier/askgod
def db_update_schema(database):
    """
    Check for pending database schema updates.
    If any are found, apply them and bump the version.

    Returns True on success, False on failure.
    """

    # Connect to the database
    db_store = Store(database)

    # Check if the DB schema has been loaded
    db_exists = False
    try:
        db_store.execute(Select(DBSchema.version))
        db_exists = True
    except Exception:
        # narrowed from a bare `except:` which would also swallow
        # SystemExit/KeyboardInterrupt
        db_store.rollback()
        logging.debug("Failed to query schema table.")

    if not db_exists:
        logging.info("Creating database")
        # The newest schema-*.sql file carries the latest full schema.
        schema_file = sorted(glob.glob("schema/schema-*.sql"))[-1]
        schema_version = schema_file.split(".")[0].split("-")[-1]
        logging.debug("Using '%s' to deploy schema '%s'" % (schema_file,
                                                            schema_version))
        with open(schema_file, "r") as fd:
            try:
                for line in fd.read().replace("\n", "").split(";"):
                    if not line:
                        continue
                    db_store.execute("%s;" % line)
                    db_commit(db_store)
                logging.info("Database created")
            except Exception:
                logging.critical("Failed to initialize the database")
                db_store.close()  # don't leak the store on failure
                return False

    # Get schema version
    version = db_store.execute(Select(Max(DBSchema.version))).get_one()[0]
    if not version:
        logging.critical("No schema version.")
        db_store.close()  # don't leak the store on failure
        return False

    # Apply updates newer than the current schema version, in order.
    for update_file in sorted(glob.glob("schema/update-*.sql")):
        update_version = update_file.split(".")[0].split("-")[-1]
        if int(update_version) > version:
            logging.info("Using '%s' to deploy update '%s'" % (update_file,
                                                               update_version))
            with open(update_file, "r") as fd:
                try:
                    for line in fd.read().replace("\n", "").split(";"):
                        if not line:
                            continue
                        db_store.execute("%s;" % line)
                        db_commit(db_store)
                except Exception:
                    logging.critical("Failed to load schema update")
                    db_store.close()  # don't leak the store on failure
                    return False

    # Get schema version
    new_version = db_store.execute(Select(Max(DBSchema.version))).get_one()[0]
    if new_version > version:
        logging.info("Database schema successfully updated from '%s' to '%s'" %
                     (version, new_version))

    db_store.close()
    # The original fell off the end returning None (falsy) on success while
    # returning False on failure, making the result unusable by callers.
    return True
コード例 #32
0
class StormManager(Singleton):
    """Singleton wrapper around the Storm store used by icepapcms.

    Handles opening/creating the sqlite or server-based database and
    offers CRUD helpers for locations, systems and drivers.
    """

    log = logging.getLogger('{}.StormManager'.format(__name__))

    def __init__(self):
        pass

    @loggingInfo
    def init(self, *args):
        # dbOK tracks whether the store was opened (and, for a fresh sqlite
        # file, created) successfully.
        self.dbOK = False
        self.openDB()

    @loggingInfo
    def reset(self):
        """Close and reopen the database connection."""
        self.closeDB()
        self.openDB()

    @loggingInfo
    def openDB(self):
        """Open (and, for a missing sqlite file, create) the database."""
        try:
            self._config = ConfigManager()
            self.db = self._config.config[self._config.database]["database"]
            create_db = False
            if self.db == self._config.Sqlite:
                folder = self._config.config[self._config.database]["folder"]
                loc = folder + '/icepapcms.db'
                print("Using Sqlite database at %s" % loc)
                create_db = not os.path.exists(loc)
                if create_db:
                    print("No database file found, creating it")
                    if not os.path.exists(folder):
                        os.mkdir(folder)
                self._database = create_database("%s:%s" % (self.db, loc))
            else:
                server = self._config.config[self._config.database]["server"]
                user = self._config.config[self._config.database]["user"]
                pwd = self._config.config[self._config.database]["password"]
                scheme = "{}://{}:{}@{}/icepapcms".format(
                    self.db, user, pwd, server)

                if self.db == 'mysql':
                    self._database = MySQL(scheme)
                else:
                    self._database = create_database(scheme)

            self._store = Store(self._database)
            if create_db:
                self.dbOK = self.createSqliteDB()
            else:
                self.dbOK = True
        except Exception as e:
            self.log.error("Unexpected error on openDB: %s", e)
            self.dbOK = False

    @loggingInfo
    def createSqliteDB(self):
        """Run the bundled SQL script to create the sqlite schema."""
        try:
            sql_file = resource_filename('icepapcms.db', 'creates_sqlite.sql')
            with open(sql_file) as f:
                sql_script = f.read()
            statements = re.compile(r";[ \t]*$", re.M)

            for statement in statements.split(sql_script):
                # Remove any comments from the file
                statement = re.sub(r"--.*[\n\\Z]", "", statement)
                if statement.strip():
                    create = statement + ";"
                    self._store.execute(create)
            self._store.commit()
            return True
        except Exception as e:
            self.log.error("Unexpected error on createSqliteDB: %s", e)
            return False

    @loggingInfo
    def closeDB(self):
        """Close the store; returns True on success, False otherwise."""
        try:
            if self.dbOK:
                self._store.close()
            return True
        except Exception as e:
            # BUGFIX: the original passed `e` as a %-argument with no
            # placeholder ("...closeDB:", e), which makes the logging module
            # raise an internal formatting error and lose the message.
            self.log.error("Unexpected error on closeDB: %s", e)
            self.dbOK = False
            return False

    @loggingInfo
    def store(self, obj):
        self._store.add(obj)

    @loggingInfo
    def remove(self, obj):
        self._store.remove(obj)

    @loggingInfo
    def addIcepapSystem(self, icepap_system):
        """Persist a new icepap system; returns True on success."""
        try:
            self._store.add(icepap_system)
            self.commitTransaction()
            return True
        except Exception as e:
            self.log.error(
                "some exception trying to store the icepap system "
                "%s: %s", icepap_system, e)
            return False

    @loggingInfo
    def deleteLocation(self, location):
        # sqlite has no cascading deletes configured, so cascade manually
        if self.db == self._config.Sqlite:
            for system in location.systems:
                self.deleteIcepapSystem(system)
        self._store.remove(location)
        self.commitTransaction()

    @loggingInfo
    def deleteIcepapSystem(self, icepap_system):
        # sqlite has no cascading deletes configured, so cascade manually
        if self.db == self._config.Sqlite:
            for driver in icepap_system.drivers:
                self.deleteDriver(driver)
        self._store.remove(icepap_system)
        self.commitTransaction()

    @loggingInfo
    def deleteDriver(self, driver):
        # remove the historic configurations and their parameters first
        for cfg in driver.historic_cfgs:
            for par in cfg.parameters:
                self._store.remove(par)
            self._store.remove(cfg)
        self._store.remove(driver)
        self.commitTransaction()

    @loggingInfo
    def getAllLocations(self):
        """Return a {name: Location} dict of every stored location."""
        try:
            locations = self._store.find(Location)
            location_dict = {}
            for location in locations:
                location_dict[location.name] = location
            return location_dict
        except Exception as e:
            self.log.error("Unexpected error on getAllLocations: %s", e)
            return {}

    @loggingInfo
    def getLocation(self, name):
        return self._store.get(Location, name)

    @loggingInfo
    def getIcepapSystem(self, icepap_name):
        return self._store.get(IcepapSystem, icepap_name)

    @loggingInfo
    def existsDriver(self, mydriver, id):
        """Return another driver with the same hardware ID, or None."""
        drivers = self._store.find(
            IcepapDriver, IcepapDriver.addr == IcepapDriverCfg.driver_addr,
            IcepapDriverCfg.id == CfgParameter.cfg_id,
            CfgParameter.name == "ID", CfgParameter.value == id)
        if drivers:
            for driver in drivers:
                if driver.addr != mydriver.addr:
                    return driver
            return None
        else:
            return None

    @loggingInfo
    def getLocationIcepapSystem(self, location):
        """Return a {name: IcepapSystem} dict for the given location."""
        try:
            icepaps = self._store.find(IcepapSystem,
                                       IcepapSystem.location_name == location)
            icepaps.order_by(IcepapSystem.name)
            ipapdict = {}
            for ipap_sys in icepaps:
                ipapdict[ipap_sys.name] = ipap_sys
            return ipapdict
        except Exception as e:
            self.log.error(
                "Unexpected error on getLocationIcepapSystem: "
                "%s", e)
            return {}

    @loggingInfo
    def rollback(self):
        self._store.rollback()

    @loggingInfo
    def commitTransaction(self):
        """Commit the current transaction; returns True on success."""
        try:
            self._store.commit()
            return True
        except Exception:
            return False
コード例 #33
0
ファイル: base_updater.py プロジェクト: keichacon/GlobaLeaks
class TableReplacer(object):
    """
    This is the base class used by every Updater.

    It opens both the old and the new sqlite database, creates the schema
    of the target version in the new database (from the full SQL schema
    file when the target is the latest version, otherwise table by table
    from the versioned Storm models) and provides default copy-based
    migration helpers for every table.
    """
    def __init__(self, old_db_file, new_db_file, start_ver):
        from globaleaks.db.update_8_9 import Context_v_8, Receiver_v_8, Notification_v_8
        from globaleaks.db.update_9_10 import Node_v_9, Receiver_v_9, User_v_9
        from globaleaks.db.update_10_11 import InternalTip_v_10, InternalFile_v_10
        from globaleaks.db.update_11_12 import Node_v_11, Context_v_11
        from globaleaks.db.update_12_13 import Node_v_12, Context_v_12
        from globaleaks.db.update_13_14 import Node_v_13, Context_v_13
        from globaleaks.db.update_14_15 import Node_v_14, User_v_14, Context_v_14, Receiver_v_14, \
            InternalTip_v_14, Notification_v_14, Stats_v_14, Comment_v_14
        from globaleaks.db.update_15_16 import Receiver_v_15, Notification_v_15
        from globaleaks.db.update_16_17 import Node_v_16, Receiver_v_16, Notification_v_16, Stats_v_16
        from globaleaks.db.update_17_18 import Node_v_17
        from globaleaks.db.update_18_19 import Node_v_18

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        # For each table: the model class valid starting at DB version 8,
        # one slot per version.  None means "unchanged since the previous
        # non-None entry" (see get_right_model).
        self.table_history = {
            'Node': [
                Node_v_9, None, Node_v_11, None, Node_v_12, Node_v_13,
                Node_v_14, Node_v_16, None, Node_v_17, Node_v_18, models.Node
            ],
            'User': [
                User_v_9, None, User_v_14, None, None, None, None, models.User,
                None, None, None, None
            ],
            'Context': [
                Context_v_8, Context_v_11, None, None, Context_v_12,
                Context_v_13, Context_v_14, models.Context, None, None, None,
                None
            ],
            'Receiver': [
                Receiver_v_8, Receiver_v_9, Receiver_v_14, None, None, None,
                None, Receiver_v_15, Receiver_v_16, models.Receiver, None, None
            ],
            'ReceiverFile': [
                models.ReceiverFile, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'Notification': [
                Notification_v_8, Notification_v_14, None, None, None, None,
                None, Notification_v_15, Notification_v_16,
                models.Notification, None, None
            ],
            'Comment': [
                Comment_v_14, None, None, None, None, None, None,
                models.Comment, None, None, None, None
            ],
            'InternalTip': [
                InternalTip_v_10, None, None, InternalTip_v_14, None, None,
                None, models.InternalTip, None, None, None, None
            ],
            'InternalFile': [
                InternalFile_v_10, None, None, models.InternalFile, None, None,
                None, None, None, None, None, None
            ],
            'WhistleblowerTip': [
                models.WhistleblowerTip, None, None, None, None, None, None,
                None, None, None, None, None
            ],
            'ReceiverTip': [
                models.ReceiverTip, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'ReceiverInternalTip': [
                models.ReceiverInternalTip, None, None, None, None, None, None,
                None, None, None, None, None
            ],
            'ReceiverContext': [
                models.ReceiverContext, None, None, None, None, None, None,
                None, None, None, None, None
            ],
            'Message': [
                models.Message, None, None, None, None, None, None, None, None,
                None, None, None
            ],
            'Stats': [
                Stats_v_14, None, None, None, None, None, None, Stats_v_16,
                None, models.Stats, None, None
            ],
            'ApplicationData': [
                models.ApplicationData, None, None, None, None, None, None,
                None, None, None, None, None
            ],
            'Field': [
                models.Field, None, None, None, None, None, None, None, None,
                None, None, None
            ],
            'FieldOption': [
                models.FieldOption, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'FieldField': [
                models.FieldField, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'Step': [
                models.Step, None, None, None, None, None, None, None, None,
                None, None, None
            ],
            'StepField': [
                models.StepField, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'Anomalies': [
                models.Anomalies, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'EventLogs': [
                models.EventLogs, None, None, None, None, None, None, None,
                None, None, None, None
            ],
        }

        for k, v in self.table_history.iteritems():
            # +1 because the count starts from 0,
            # -8 because the releases before the 8th are not supported anymore
            length = DATABASE_VERSION + 1 - 8
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(
                    length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            # Last migration step: build the target schema from the
            # canonical SQL schema file instead of the versioned models.
            log.msg('{} Acquire SQL schema {}'.format(
                self.debug_info, GLSetting.db_schema_file))

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSetting.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        log.msg(
                            'OperationalError in "{}"'.format(create_query))
            self.store_new.commit()
            return
            # return here and manage the migrant versions here:

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(
                    self.debug_info, create_query))
                raise excep

        self.store_new.commit()

    def close(self):
        """Close both the old and the new database stores."""
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        """Hook executed before the migration; overridden by subclasses."""
        pass

    def epilogue(self):
        """Hook executed after the migration; overridden by subclasses."""
        pass

    def get_right_model(self, table_name, version):
        """Return the Storm model class for *table_name* at *version*.

        When the history slot for *version* is None (table unchanged at
        that version) the most recent preceding definition is returned.

        Raises NotImplementedError for unknown tables and ValueError for
        versions beyond DATABASE_VERSION.
        """
        table_index = (version - 8)

        if table_name not in self.table_history:
            msg = 'Not implemented usage of get_right_model {} ({} {})'.format(
                __file__, table_name, self.start_ver)
            raise NotImplementedError(msg)

        if version > DATABASE_VERSION:
            raise ValueError(
                'Version supplied must be less or equal to {}'.format(
                    DATABASE_VERSION))

        # Walk backwards to the last version at which the table was
        # (re)defined.  The previous implementation looped on
        # `version >= 0` without ever decrementing `version`, so an
        # all-None prefix would walk into negative indices and raise
        # IndexError instead of returning None.
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # No definition found at or before the requested version.
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name:
        @param version:
        @return:
            The CREATE TABLE SQL for *model_name* at *version*, or None
            when the table does not exist at that version.
        """
        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):
        """Copy every row of *table_name* from the old DB to the new one."""
        models_count = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)).count()
        log.msg('{} default {} migration assistant: #{}'.format(
            self.debug_info, table_name, models_count))

        old_objects = self.store_old.find(
            self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # Storm internals simply reversed
            for _, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        """Copy the single row of *table_name* from the old DB to the new one."""
        log.msg('{} default {} migration assistant'.format(
            self.debug_info, table_name))

        old_obj = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # Storm internals simply reversed
        for _, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        has been created between 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        has been created between 14 and 15
        and is not migrated since 17
        """
        if self.start_ver < 17:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        There is no need to migrate it the application data.
        Default application data is loaded by the application
        and stored onto the db at each new start.
        """
        return

    def migrate_Field(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Field")

    def migrate_FieldOption(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldOption")

    def migrate_FieldField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldField")

    def migrate_Step(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Step")

    def migrate_StepField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("StepField")

    def migrate_Anomalies(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Anomalies")

    def migrate_EventLogs(self):
        """
        has been created between 15 and 16!
        """
        if self.start_ver < 16:
            return

        self._perform_copy_list("EventLogs")
コード例 #34
0
def perform_version_update(version):
    """
    Migrate the database from *version* up to DATABASE_VERSION, one
    version at a time.

    The migration runs inside a temporary directory; the original
    database file is replaced only after every step has succeeded, and
    the temporary directory is always removed afterwards.

    @param version: the database version currently found on disk
    @return: None
    """
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        GLSettings.print_msg("Migrations from DB version lower than %d are no more supported!" % FIRST_DATABASE_VERSION_SUPPORTED)
        GLSettings.print_msg("If you can't create your Node from scratch, contact us asking for support.")
        quit()

    tmpdir = os.path.abspath(os.path.join(GLSettings.db_path, 'tmp'))
    orig_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % version))
    final_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))

    # Work on a copy inside tmpdir so a failed migration leaves the
    # original database untouched.
    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy2(orig_db_file, tmpdir)

    try:
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % version))
            new_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % (version + 1)))

            GLSettings.db_file = new_db_file
            GLSettings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            GLSettings.print_msg("Updating DB from version %d to version %d" % (version, version + 1))

            store_old = Store(create_database('sqlite:' + old_db_file))
            store_new = Store(create_database('sqlite:' + new_db_file))

            # Here is instanced the migration script
            MigrationModule = importlib.import_module("globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(migration_mapping, version, store_old, store_new)

            GLSettings.print_msg("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    GLSettings.print_msg("Failure while executing migration prologue: %s" % exception)
                    # bare raise preserves the original traceback
                    raise

                for model_name, _ in migration_mapping.iteritems():
                    if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            GLSettings.print_msg("Failure while migrating table %s: %s " % (model_name, exception))
                            raise

                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    GLSettings.print_msg("Failure while executing migration epilogue: %s " % exception)
                    raise

            finally:
                # the database should be always closed before leaving the application
                # in order to not keep leaking journal files.
                migration_script.close()

            GLSettings.print_msg("Migration stats:")

            # we open a new db in order to verify integrity of the generated file
            store_verify = Store(create_database('sqlite:' + new_db_file))

            for model_name, _ in migration_mapping.iteritems():
                if model_name == 'ApplicationData':
                    continue

                if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                    count = store_verify.find(migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" % \
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            GLSettings.print_msg(" * %s table migrated (entries count changed from %d to %d)" % \
                                                 (model_name, migration_script.entries_count[model_name], count))
                    else:
                        GLSettings.print_msg(" * %s table migrated (%d entry(s))" % \
                                             (model_name, migration_script.entries_count[model_name]))

            version += 1

            store_verify.close()

    except Exception:
        # simply propagate the exception; `raise exception` would have
        # discarded the original traceback under Python 2
        raise

    else:
        # in case of success first copy the new migrated db, then as last action delete the original db file
        shutil.copy(os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % DATABASE_VERSION)), final_db_file)
        os.remove(orig_db_file)

    finally:
        # always cleanup the temporary directory used for the migration
        shutil.rmtree(tmpdir, True)
コード例 #35
0
class TableReplacer:
    """
    This is the base class used by every Updater
    """

    def __init__(self, old_db_file, new_db_file, start_ver):

        from globaleaks.db.update_5_6 import User_version_5, Comment_version_5, Node_version_5
        from globaleaks.db.update_6_7 import Node_version_6, Context_version_6
        from globaleaks.db.update_7_8 import Node_version_7, Notification_version_7, Context_version_7, \
            Receiver_version_7, InternalFile_version_7
        from globaleaks.db.update_8_9 import Context_version_8, Receiver_version_8, Notification_version_8
        from globaleaks.db.update_9_10 import Node_version_9, ApplicationData_version_10, \
            Receiver_version_9, User_version_9
        from globaleaks.db.update_10_11 import InternalTip_version_10, InternalFile_version_10
        from globaleaks.db.update_11_12 import Node_version_11, ApplicationData_version_11, Context_version_11

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        self.table_history = {
            'Node' : [ Node_version_5, Node_version_6, Node_version_7, Node_version_9, None, Node_version_11, None, models.Node],
            'User' : [ User_version_5, User_version_9, None, None, None, models.User, None, None],
            'Context' : [ Context_version_6, None, Context_version_7, Context_version_8, Context_version_11, None, None, models.Context],
            'Receiver': [ Receiver_version_7, None, None, Receiver_version_8, Receiver_version_9, models.Receiver, None, None],
            'ReceiverFile' : [ models.ReceiverFile, None, None, None, None, None, None, None],
            'Notification': [ Notification_version_7, None, None, Notification_version_8, models.Notification, None, None, None],
            'Comment': [ Comment_version_5, models.Comment, None, None, None, None, None, None],
            'InternalTip' : [ InternalTip_version_10, None, None, None, None, None, models.InternalTip, None],
            'InternalFile' : [ InternalFile_version_7, None, None, InternalFile_version_10, None, None, models.InternalFile, None],
            'WhistleblowerTip' : [ models.WhistleblowerTip, None, None, None, None, None, None, None],
            'ReceiverTip' : [ models.ReceiverTip, None, None, None, None, None, None , None],
            'ReceiverInternalTip' : [ models.ReceiverInternalTip, None, None, None, None, None, None, None],
            'ReceiverContext' : [ models.ReceiverContext, None, None, None, None, None, None, None],
            'Message' : [ models.Message, None, None, None, None, None, None, None],
            'Stats' : [models.Stats, None, None, None, None, None, None, None],
            'ApplicationData' : [ApplicationData_version_10, None, None, None, None, None, None, models.ApplicationData],
        }

        for k, v in self.table_history.iteritems():
            # +1 because count start from 0,
            # -5 because the relase 0,1,2,3,4 are not supported anymore
            assert len(v) == (DATABASE_VERSION + 1 - 5), \
                "I'm expecting a table with %d statuses (%s)" % (DATABASE_VERSION, k)

        print "%s Opening old DB: %s" % (self.debug_info, old_db_file)
        old_database = create_database("sqlite:%s" % self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database("sqlite:%s" % new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:

            print "%s Acquire SQL schema %s" % (self.debug_info, GLSetting.db_schema_file)

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                print "Unable to access %s" % GLSetting.db_schema_file
                raise Exception("Unable to access db schema file")

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f.readlines()).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query+';')
                    except OperationalError:
                        print "OperationalError in [%s]" % create_query

            self.store_new.commit()
            return
            # return here and manage the migrant versions here:

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver +1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query+';')
            except OperationalError as excep:
                print "%s OperationalError in [%s]" % (self.debug_info, create_query)
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        pass

    def epilogue(self):
        pass

    def get_right_model(self, table_name, version):

        table_index = (version - 5)

        if not self.table_history.has_key(table_name):
            print "Not implemented usage of get_right_model %s (%s %d)" % (
                __file__, table_name, self.start_ver)
            raise NotImplementedError

        assert version <= DATABASE_VERSION, "wrong developer brainsync"

        if self.table_history[table_name][table_index]:
            # print "Immediate return %s = %s at version %d" % \
            #       ( table_name, self.table_history[table_name][table_index], version )
            return self.table_history[table_name][table_index]

        # else, it's none, and we've to take the previous valid version
        #
        # print "Requested version %d of %s need to be collected in the past" %\
        #       (version, table_name)

        while version >= 0:
            if self.table_history[table_name][table_index]:
            # print ".. returning %s = %s" %\
            #           ( table_name, self.table_history[table_name][table_index] )
                return self.table_history[table_name][table_index]
            table_index -= 1

        # This never want happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name:
        @param version:
        @return:
            The SQL right for the stuff we've
        """

        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):

        print "%s default %s migration assistant: #%d" % (
            self.debug_info, table_name,
            self.store_old.find(self.get_right_model(table_name, self.start_ver)).count())

        old_objects = self.store_old.find(self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # Storm internals simply reversed
            for k, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name) )

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        print "%s default %s migration assistant" % (self.debug_info, table_name)

        old_obj = self.store_old.find(self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # Storm internals simply reversed
        for k, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name) )

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        has been created between 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        has been created between 9 and 10!
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        has been created between 9 and 10!
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("ApplicationData")
コード例 #36
0
ファイル: base_updater.py プロジェクト: nsfw/GlobaLeaks
class TableReplacer(object):
    """
    This is the base class used by every Updater
    """

    def __init__(self, table_history, old_db_file, new_db_file, start_ver):
        """
        Open the old and new sqlite databases and prepare the new schema
        for a single migration step (start_ver -> start_ver + 1).

        table_history: dict mapping table name -> list of model classes,
            one slot per supported schema version (None in a slot means the
            table is unchanged / absent at that version).
        old_db_file / new_db_file: paths of the source and target sqlite files.
        start_ver: schema version of the old database.

        Raises TypeError when a table history has the wrong length, and
        IOError when the final schema file cannot be read.
        """
        self.table_history = table_history
        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        # Every table's history must cover every supported version,
        # otherwise get_right_model() would index out of range.
        for k, v in table_history.iteritems():
            length = DATABASE_VERSION + 1 - FIRST_DATABASE_VERSION_SUPPORTED
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSettings.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        # Final step: the target is the current version, so build the new
        # DB directly from the shipped SQL schema file.
        if self.start_ver + 1 == DATABASE_VERSION:
            log.msg('{} Acquire SQL schema {}'.format(self.debug_info, GLSettings.db_schema_file))

            if not os.access(GLSettings.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSettings.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSettings.db_schema_file) as f:
                # The schema file is a ';'-separated list of statements;
                # some fragments (e.g. trailing blanks) are not valid SQL,
                # so failures are logged and skipped.
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        log.msg('OperationalError in "{}"'.format(create_query))
            self.store_new.commit()
            return
            # return here and manage the migrant versions here:

        # Intermediate step: create each table from the model definition
        # matching the *target* version (start_ver + 1).
        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(self.debug_info, create_query))
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        """Hook run before the table migrations; subclasses may override."""
        pass

    def epilogue(self):
        """Hook run after the table migrations; subclasses may override."""
        pass

    def get_right_model(self, table_name, version):
        """
        Return the model class describing `table_name` at schema `version`.

        When the history slot for `version` is None (the table did not
        change at that version), walk backwards and return the most recent
        earlier definition; return None if no definition exists at all.

        Raises NotImplementedError for an unknown table name and
        ValueError for a version greater than DATABASE_VERSION.
        """
        table_index = (version - FIRST_DATABASE_VERSION_SUPPORTED)

        if table_name not in self.table_history:
            msg = 'Not implemented usage of get_right_model {} ({} {})'.format(
                __file__, table_name, self.start_ver)
            raise NotImplementedError(msg)

        if version > DATABASE_VERSION:
            raise ValueError('Version supplied must be less or equal to {}'.format(
                DATABASE_VERSION))

        if self.table_history[table_name][table_index]:
            return self.table_history[table_name][table_index]

        # Walk backwards to the most recent non-None model definition.
        # BUGFIX: the original looped on `while version >= 0` but never
        # decremented `version`, so with an all-empty history the index
        # went negative, wrapped around (Python negative indexing) and
        # eventually raised IndexError instead of reaching `return None`.
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # No definition found in any supported version.
        return None

    def get_right_sql_version(self, model_name, version):
        """
        Return the CREATE-TABLE SQL for `model_name` at schema `version`,
        or None when the model has no definition at that version.
        """
        model = self.get_right_model(model_name, version)
        return generateCreateQuery(model) if model else None

    def _perform_copy_list(self, table_name):
        """
        Copy every row of `table_name` from the old store to the new one,
        matching columns by (destination) attribute name, then commit.

        Improvements over the original: the find() query is issued once
        instead of twice (a second identical query was run just to count),
        and the destination model lookup is hoisted out of the row loop.
        """
        old_model = self.get_right_model(table_name, self.start_ver)
        new_model = self.get_right_model(table_name, self.start_ver + 1)

        old_objects = self.store_old.find(old_model)

        log.msg('{} default {} migration assistant: #{}'.format(
            self.debug_info, table_name, old_objects.count()))

        for old_obj in old_objects:
            new_obj = new_model()

            # Storm keeps column descriptors on the model; only the
            # descriptor values are needed to learn the attribute names.
            for v in new_obj._storm_columns.itervalues():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        """
        Copy the single row of `table_name` (e.g. Node, Notification) from
        the old store to the new one, matching columns by attribute name,
        then commit.
        """
        log.msg('{} default {} migration assistant'.format(self.debug_info, table_name))

        old_obj = self.store_old.find(self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # Only the column descriptors are needed, not their keys, to learn
        # the attribute names to copy across.
        for v in new_obj._storm_columns.itervalues():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        """Copy all Context rows into the new schema version."""
        self._perform_copy_list("Context")

    def migrate_Node(self):
        """Copy the single Node row into the new schema version."""
        self._perform_copy_single("Node")

    def migrate_User(self):
        """Copy all User rows into the new schema version."""
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        """Copy all ReceiverTip rows into the new schema version."""
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        """Copy all WhistleblowerTip rows into the new schema version."""
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        """Copy all Comment rows into the new schema version."""
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        """Copy all InternalTip rows into the new schema version."""
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        """Copy all Receiver rows into the new schema version."""
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        """Copy all InternalFile rows into the new schema version."""
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        """Copy all ReceiverFile rows into the new schema version."""
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        """Copy the single Notification row into the new schema version."""
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        """Copy all ReceiverContext association rows into the new schema version."""
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        """Copy all ReceiverInternalTip association rows into the new schema version."""
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """Copy all Message rows into the new schema version."""
        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        Copy all Stats rows into the new schema version.

        The Stats table was created between versions 14 and 15; per the
        guard below it is only carried over when starting from version 17
        or later ("not migrated since 17" in the original note).
        """
        if self.start_ver < 17:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        Intentionally a no-op: default application data is loaded by the
        application and stored into the db at each new start, so there is
        no need to migrate it.
        """
        return

    def migrate_Field(self):
        """
        Copy all Field rows into the new schema version.

        The Field table was created between versions 14 and 15, so there
        is nothing to copy when migrating from an earlier version.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Field")

    def migrate_FieldAttr(self):
        """
        Copy all FieldAttr rows into the new schema version.

        The FieldAttr table was created between versions 22 and 23, so
        there is nothing to copy when migrating from an earlier version.
        """
        if self.start_ver < 23:
            return

        self._perform_copy_list("FieldAttr")

    def migrate_FieldOption(self):
        """
        Copy all FieldOption rows into the new schema version.

        The FieldOption table was created between versions 14 and 15, so
        there is nothing to copy when migrating from an earlier version.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldOption")

    def migrate_FieldField(self):
        """
        Copy all FieldField association rows into the new schema version.

        The FieldField table was created between versions 14 and 15, so
        there is nothing to copy when migrating from an earlier version.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldField")

    def migrate_Step(self):
        """
        Copy all Step rows into the new schema version.

        The Step table was created between versions 14 and 15, so there
        is nothing to copy when migrating from an earlier version.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Step")

    def migrate_StepField(self):
        """
        Copy all StepField association rows into the new schema version.

        The StepField table was created between versions 14 and 15, so
        there is nothing to copy when migrating from an earlier version.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("StepField")

    def migrate_Anomalies(self):
        """
        Copy all Anomalies rows into the new schema version.

        The Anomalies table was created between versions 14 and 15 and was
        later dropped/recreated; per the guard below it is only carried
        over when starting from version 23 or later.

        NOTE(review): the original comment said "dropped before 22" while
        the guard skips anything below 23 — confirm which is intended.
        """
        if self.start_ver < 23:
            return

        self._perform_copy_list("Anomalies")

    def migrate_EventLogs(self):
        """
        Copy all EventLogs rows into the new schema version.

        The EventLogs table was created between versions 15 and 16 and
        dropped/recreated before 20, so it is only carried over when
        starting from version 20 or later.
        """
        if self.start_ver < 20:
            return

        self._perform_copy_list("EventLogs")