Code example #1
    def postconditions_34(self):
        store = Store(create_database(GLSettings.db_uri))
        notification_l10n = NotificationL10NFactory(store)
        x = notification_l10n.get_val(u'export_template', u'it')
        self.assertNotEqual(x, 'unmodifiable')
        store.commit()
        store.close()
Code example #2
    def test_check_unmodifiable_strings(self):
        # This test case asserts that data migration updates unmodifiable l10n strings
        self._initStartDB(34)

        notification_l10n = NotificationL10NFactory(self.store)

        t0 = notification_l10n.get_val('export_template', 'it')

        notification_l10n.set_val('export_template', 'it', '')

        t1 = notification_l10n.get_val('export_template', 'it')

        self.assertEqual(t1, '')

        self.store.commit()

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val('version', self.dummy_ver)
        self.assertEqual(prv.get_val('version'), self.dummy_ver)
        store.commit()
        store.close()

        migration.perform_data_update(self.db_file)

        store = Store(create_database(GLSettings.db_uri))
        notification_l10n = NotificationL10NFactory(store)
        t2 = notification_l10n.get_val('export_template', 'it')
        self.assertEqual(t2, t0)
        store.commit()
        store.close()

        shutil.rmtree(GLSettings.db_path)
Code example #3
File: test_migration.py Project: Taipo/GlobaLeaks
    def setUp(self):
        helpers.init_glsettings_for_unit_tests()

        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % DATABASE_VERSION
        db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'db', 'populated', db_name)
        shutil.copyfile(db_path, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val('version', self.dummy_ver)
        self.assertEqual(prv.get_val('version'), self.dummy_ver)
        store.commit()
        store.close()

        # backup various mocks that we will use
        self._bck_f = config.is_cfg_valid
        GLConfig['private']['xx_smtp_password'] = GLConfig['private'].pop('smtp_password')
        self.dp = u'yes_you_really_should_change_me'
Code example #4
    def setUp(self):
        helpers.init_glsettings_for_unit_tests()

        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        shutil.rmtree(GLSettings.db_path, True)
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % DATABASE_VERSION
        db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                               'db', 'populated', db_name)
        shutil.copyfile(db_path, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val(u'version', self.dummy_ver)
        self.assertEqual(prv.get_val(u'version'), self.dummy_ver)
        store.commit()
        store.close()

        # backup various mocks that we will use
        self._bck_f = config.is_cfg_valid
        GLConfig['private']['xx_smtp_password'] = GLConfig['private'].pop(
            'smtp_password')
        self.dp = u'yes_you_really_should_change_me'
Code example #5
File: base.py Project: trevor/mailman3
    def initialize(self, debug=None):
        """See `IDatabase`."""
        # Calculate the engine url.
        url = expand(config.database.url, config.paths)
        log.debug('Database url: %s', url)
        # XXX By design of SQLite, database file creation does not honor
        # umask.  See their ticket #1193:
        # http://www.sqlite.org/cvstrac/tktview?tn=1193,31
        #
        # This sucks for us because the mailman.db file /must/ be group
        # writable, however even though we guarantee our umask is 002 here, it
        # still gets created without the necessary g+w permission, due to
        # SQLite's policy.  This should only affect SQLite engines because it's
        # the only one that creates a little file on the local file system.
        # This kludges around their bug by "touch"ing the database file before
        # SQLite has any chance to create it, thus honoring the umask and
        # ensuring the right permissions.  We only try to do this for SQLite
        # engines, and yes, we could have chmod'd the file after the fact, but
        # half dozen and all...
        self.url = url
        self._prepare(url)
        database = create_database(url)
        store = Store(database, GenerationalCache())
        database.DEBUG = (as_boolean(config.database.debug)
                          if debug is None else debug)
        self.store = store
        store.commit()
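
The "touch" workaround the comment describes can be sketched in a few lines. The snippet delegates it to self._prepare(url); the body below is only a guess at such a helper, under the stated assumption about the URL format, not mailman's actual implementation:

import os

def _prepare(url):
    # Create the SQLite file ourselves, so it is born honoring the process
    # umask, before SQLite gets a chance to create it.
    # Assumes a URL of the form 'sqlite:/path/to/mailman.db'.
    if url.startswith('sqlite:'):
        path = url.split(':', 1)[1]
        if path:
            fd = os.open(path, os.O_WRONLY | os.O_CREAT, 0o666)
            os.close(fd)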
Code example #6
    def test_check_unmodifiable_strings(self):
        # This test case asserts that data migration updates unmodifiable l10n strings
        self._initStartDB(34)

        notification_l10n = NotificationL10NFactory(self.store)

        t0 = notification_l10n.get_val("export_template", "it")

        notification_l10n.set_val("export_template", "it", "")

        t1 = notification_l10n.get_val("export_template", "it")

        self.assertEqual(t1, "")

        self.store.commit()

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = "2.XX.XX"
        prv.set_val("version", self.dummy_ver)
        self.assertEqual(prv.get_val("version"), self.dummy_ver)
        store.commit()
        store.close()

        migration.perform_data_update(self.db_file)

        store = Store(create_database(GLSettings.db_uri))
        notification_l10n = NotificationL10NFactory(store)
        t2 = notification_l10n.get_val("export_template", "it")
        self.assertEqual(t2, t0)
        store.commit()
        store.close()

        shutil.rmtree(GLSettings.db_path)
Code example #7
File: test_migration.py Project: Taipo/GlobaLeaks
    def test_migration_error_with_removed_language(self):
        store = Store(create_database(GLSettings.db_uri))
        zyx = EnabledLanguage('zyx')
        store.add(zyx)
        store.commit()
        store.close()

        self.assertRaises(Exception, migration.perform_data_update, self.db_file)
Code example #8
    def preconditions_34(self):
        store = Store(create_database(self.start_db_uri))
        notification_l10n = NotificationL10NFactory(store)
        notification_l10n.set_val(u'export_template', u'it', 'unmodifiable')
        x = notification_l10n.get_val(u'export_template', u'it')
        self.assertEqual(x, 'unmodifiable')
        store.commit()
        store.close()
Code example #9
    def test_migration_error_with_removed_language(self):
        store = Store(create_database(GLSettings.db_uri))
        zyx = EnabledLanguage('zyx')
        store.add(zyx)
        store.commit()
        store.close()

        self.assertRaises(Exception, migration.perform_data_update,
                          self.db_file)
Code example #10
def perform_data_update(dbfile):
    new_tmp_store = Store(create_database(GLSettings.make_db_uri(dbfile)))
    try:
        db_perform_data_update(new_tmp_store)
        new_tmp_store.commit()
    except:
        new_tmp_store.rollback()
        raise
    finally:
        new_tmp_store.close()
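
The try/commit/rollback/close sequence in perform_data_update recurs throughout these examples; here is a minimal sketch of the same pattern packaged as a context manager (the helper name transact is made up for illustration):

from contextlib import contextmanager

@contextmanager
def transact(db_uri):
    # Open a store, hand it to the caller, commit on success,
    # roll back on error, and always close.
    store = Store(create_database(db_uri))
    try:
        yield store
        store.commit()
    except:
        store.rollback()
        raise
    finally:
        store.close()

# usage:
#     with transact(GLSettings.make_db_uri(dbfile)) as store:
#         db_perform_data_update(store)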
Code example #11
def do_statspollute(dbfile):

    # source
    gl_database = create_database("sqlite:%s" % dbfile)
    source_store = Store(gl_database)

    stats = source_store.find(models.Stats)

    counter = 0
    for s in stats:
        source_store.remove(s)
        counter += 1

    print "removed %d entries from stats" % counter

    counter = 0
    # 21 days in the past
    for past_hours in xrange(24 * 7 * 3):
        past_hours += 4
        when = utc_past_date(hours=past_hours)

        newstat = models.Stats()
        newstat.freemb = randint(1000, 1050)
        newstat.year = when.isocalendar()[0]
        newstat.week = when.isocalendar()[1]

        level = round((randint(0, 1000) / 240.0), 1) - 2

        def random_pollution():
            return int(randint(0, 11) + (5 * level))

        activity_fake = {
            'successfull_logins': random_pollution(),
            'failed_logins': random_pollution(),
            'started_submissions': random_pollution(),
            'completed_submissions': random_pollution(),
            'uploaded_files': int(randint(0, 11) + (5 * level)),
            'appended_files': random_pollution(),
            'wb_comments': random_pollution(),
            'wb_messages': random_pollution(),
            'receiver_comments': random_pollution(),
            'receiver_messages': random_pollution()
        }

        for k, v in activity_fake.iteritems():
            if v < 0:
                activity_fake[k] = 0

        newstat.start = when
        newstat.summary = activity_fake
        counter += 1
        source_store.add(newstat)

    print "Committing %d stats" % counter
    source_store.commit()
Code example #12
    def test_trim_value_to_range(self):
        store = Store(create_database(GLSettings.db_uri))

        nf = config.NodeFactory(store)
        fake_cfg = nf.get_cfg(u'wbtip_timetolive')

        self.assertRaises(errors.InvalidModelInput, fake_cfg.set_v, 3650)

        fake_cfg.value = {'v': 3650}
        store.commit()

        MigrationBase.trim_value_to_range(nf, u'wbtip_timetolive')
        self.assertEqual(nf.get_val(u'wbtip_timetolive'), 365 * 2)
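
What trim_value_to_range asserts is an ordinary clamp of a configuration value into its valid interval; a minimal standalone sketch, assuming the valid range for wbtip_timetolive tops out at 365 * 2 days:

def clamp(value, lo, hi):
    # Force value into the inclusive range [lo, hi].
    return max(lo, min(hi, value))

# clamp(3650, 0, 365 * 2) == 730, matching the assertion in the test above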
Code example #13
File: test_models.py Project: tiwilliam/entertainer
    def testCreate(self):
        """Test creation of new VideoFile"""

        store = Store(self.db)
        videofile = models.VideoFile()
        videofile.filename = u"/home/user/foo.mpg"
        store.add(videofile)
        store.commit()

        self.assertTrue(Store.of(videofile) is store)

        videofile_from_database = store.find(models.VideoFile, models.VideoFile.filename == u"/home/user/foo.mpg").one()
        self.assertTrue(videofile is videofile_from_database)
Code example #14
File: test_models.py Project: tiwilliam/entertainer
    def testCreate(self):
        """Test creation of new MusicAlbum"""

        store = Store(self.db)
        musicalbum = models.MusicAlbum()
        musicalbum.title = u"The Lady Dance"
        store.add(musicalbum)
        store.commit()

        self.assertTrue(Store.of(musicalbum) is store)

        musicalbum_from_database = store.find(models.MusicAlbum, models.MusicAlbum.title == u"The Lady Dance").one()
        self.assertTrue(musicalbum is musicalbum_from_database)
Code example #15
File: test_models.py Project: tiwilliam/entertainer
    def testCreate(self):
        """Test creation of new MusicTrack"""

        store = Store(self.db)
        musictrack = models.MusicTrack()
        musictrack.title = u"The Beautiful Ones"
        store.add(musictrack)
        store.commit()

        self.assertTrue(Store.of(musictrack) is store)

        musictrack_from_database = store.find(models.MusicTrack, models.MusicTrack.title == u"The Beautiful Ones").one()
        self.assertTrue(musictrack is musictrack_from_database)
Code example #16
File: test_models.py Project: tiwilliam/entertainer
    def testCreate(self):
        """Test creation of new PhotoAlbum"""

        store = Store(self.db)
        photoalbum = models.PhotoAlbum()
        photoalbum.title = u"Photo Album Title"
        photoalbum.description = u"This is a photo description"
        store.add(photoalbum)
        store.commit()

        self.assertTrue(Store.of(photoalbum) is store)

        photoalbum_from_database = store.find(models.PhotoAlbum, models.PhotoAlbum.title == u"Photo Album Title").one()
        self.assertTrue(photoalbum is photoalbum_from_database)
Code example #17
File: test_models.py Project: tiwilliam/entertainer
    def testCreate(self):
        '''Test creation of new MusicTrack'''

        store = Store(self.db)
        musictrack = models.MusicTrack()
        musictrack.title = u'The Beautiful Ones'
        store.add(musictrack)
        store.commit()

        self.assertTrue(Store.of(musictrack) is store)

        musictrack_from_database = store.find(
            models.MusicTrack,
            models.MusicTrack.title == u'The Beautiful Ones').one()
        self.assertTrue(musictrack is musictrack_from_database)
Code example #18
File: test_models.py Project: tiwilliam/entertainer
    def testCreate(self):
        '''Test creation of new MusicAlbum'''

        store = Store(self.db)
        musicalbum = models.MusicAlbum()
        musicalbum.title = u'The Lady Dance'
        store.add(musicalbum)
        store.commit()

        self.assertTrue(Store.of(musicalbum) is store)

        musicalbum_from_database = store.find(
            models.MusicAlbum,
            models.MusicAlbum.title == u'The Lady Dance').one()
        self.assertTrue(musicalbum is musicalbum_from_database)
Code example #19
File: test_models.py Project: tiwilliam/entertainer
    def testCreate(self):
        '''Test creation of new PhotoImage'''

        store = Store(self.db)
        photoimage = models.PhotoImage()
        photoimage.filename = u'/home/user/photo.jpg'
        store.add(photoimage)
        store.commit()

        self.assertTrue(Store.of(photoimage) is store)

        photoimage_from_database = store.find(
            models.PhotoImage,
            models.PhotoImage.filename == u'/home/user/photo.jpg').one()
        self.assertTrue(photoimage is photoimage_from_database)
Code example #20
File: test_models.py Project: tiwilliam/entertainer
    def testCreate(self):
        '''Test creation of new VideoFile'''

        store = Store(self.db)
        videofile = models.VideoFile()
        videofile.filename = u'/home/user/foo.mpg'
        store.add(videofile)
        store.commit()

        self.assertTrue(Store.of(videofile) is store)

        videofile_from_database = store.find(
            models.VideoFile,
            models.VideoFile.filename == u'/home/user/foo.mpg').one()
        self.assertTrue(videofile is videofile_from_database)
Code example #21
File: test_models.py Project: tiwilliam/entertainer
    def testCreate(self):
        '''Test creation of new MusicPlaylist'''

        store = Store(self.db)
        musicplaylist = models.MusicPlaylist()
        musicplaylist.title = u'The Ultimate Heavy Metal Goth Emo'
        store.add(musicplaylist)
        store.commit()

        self.assertTrue(Store.of(musicplaylist) is store)

        musicplaylist_from_database = store.find(models.MusicPlaylist,
            models.MusicPlaylist.title == \
                u'The Ultimate Heavy Metal Goth Emo').one()
        self.assertTrue(musicplaylist is musicplaylist_from_database)
Code example #22
File: test_models.py Project: tiwilliam/entertainer
    def testCreate(self):
        """Test creation of new MusicPlaylist"""

        store = Store(self.db)
        musicplaylist = models.MusicPlaylist()
        musicplaylist.title = u"The Ultimate Heavy Metal Goth Emo"
        store.add(musicplaylist)
        store.commit()

        self.assertTrue(Store.of(musicplaylist) is store)

        musicplaylist_from_database = store.find(
            models.MusicPlaylist, models.MusicPlaylist.title == u"The Ultimate Heavy Metal Goth Emo"
        ).one()
        self.assertTrue(musicplaylist is musicplaylist_from_database)
Code example #23
File: test_models.py Project: tiwilliam/entertainer
    def testCreate(self):
        """Test creation of new PhotoImage"""

        store = Store(self.db)
        photoimage = models.PhotoImage()
        photoimage.filename = u"/home/user/photo.jpg"
        store.add(photoimage)
        store.commit()

        self.assertTrue(Store.of(photoimage) is store)

        photoimage_from_database = store.find(
            models.PhotoImage, models.PhotoImage.filename == u"/home/user/photo.jpg"
        ).one()
        self.assertTrue(photoimage is photoimage_from_database)
Code example #24
File: test_models.py Project: tiwilliam/entertainer
    def testCreate(self):
        '''Test creation of new PhotoAlbum'''

        store = Store(self.db)
        photoalbum = models.PhotoAlbum()
        photoalbum.title = u'Photo Album Title'
        photoalbum.description = u'This is a photo description'
        store.add(photoalbum)
        store.commit()

        self.assertTrue(Store.of(photoalbum) is store)

        photoalbum_from_database = store.find(
            models.PhotoAlbum,
            models.PhotoAlbum.title == u'Photo Album Title').one()
        self.assertTrue(photoalbum is photoalbum_from_database)
Code example #25
    def commit(self):
        if self.transaction:
            # Keep a reference before clearing the attribute, so commit()
            # is invoked on the transaction rather than on None.
            transaction, self.transaction = self.transaction, None
            return transaction.commit()
        result = Store.commit(self)
        #Store.reset(self)
        return result
Code example #26
File: connection.py Project: tiwilliam/entertainer
    def _create(self):
        '''Create a new entertainer database

        Reads the current database schema dictionary, and creates the sqlite
        database based on that schema
        '''

        store = Store(self._db)
        store.execute("""
        CREATE TABLE `entertainer_data` (
            name VARCHAR PRIMARY KEY,
            value VARCHAR);""")
        store.execute(
            "INSERT INTO `entertainer_data` VALUES ('version', '0.2a');")

        for query in SCHEMA.itervalues():
            store.execute(query, noresult=True)
            store.commit()
Code example #27
File: connection.py Project: tiwilliam/entertainer
    def _create(self):
        '''Create a new entertainer database

        Reads the current database schema dictionary, and creates the sqlite
        database based on that schema
        '''

        store = Store(self._db)
        store.execute("""
        CREATE TABLE `entertainer_data` (
            name VARCHAR PRIMARY KEY,
            value VARCHAR);""")
        store.execute(
            "INSERT INTO `entertainer_data` VALUES ('version', '0.2a');")

        for query in SCHEMA.itervalues():
            store.execute(query, noresult=True)
            store.commit()
Code example #28
class Controller:

    def __init__(self):
        self.DATABASE = None
        self.store = None

        self.openConnection()

    def openConnection(self):
        # Note: no space is allowed after the 'sqlite:' scheme in the URI.
        self.DATABASE = create_database('sqlite:mydata.db')
        self.store = Store(self.DATABASE)

    def closeConnection(self):
        self.store.commit()
        self.store.close()
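
For reference, Storm's SQLite URIs come in a few forms; a quick sketch (file paths are illustrative):

from storm.locals import create_database

memory_db = create_database('sqlite:')             # transient in-memory database
relative_db = create_database('sqlite:mydata.db')  # file relative to the working directory
absolute_db = create_database('sqlite:/var/lib/app/mydata.db')  # absolute path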
Code example #29
File: model.py Project: MarSik/elshelves
def getStore(url, create=False):
    # register new Storm scheme
    register_scheme("sqlitefk", ForeignKeysSQLite)

    d = create_database(url)
    s = Store(d)

    if create:
        with open(os.path.join(os.path.dirname(__file__), "schema.sql")) as f:
            schema = f.read().split("\n\n")
        for cmd in schema:
            s.execute(cmd)

        version = Meta()
        version.key = u"created"
        s.add(version)

        s.commit()

    return s
Code example #30
    def test_schema_uri(self):
        """
        It's possible to specify an alternate URI for applying the schema
        and cleaning up tables after a test.
        """
        schema_uri = "sqlite:///%s" % self.makeFile()
        self.databases[0]["schema-uri"] = schema_uri
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        schema_store = Store(create_database(schema_uri))

        # The schema was applied using the alternate schema URI
        statement = "SELECT name FROM sqlite_master WHERE name='patch'"
        self.assertEqual([], list(store.execute(statement)))
        self.assertEqual([("patch", )], list(schema_store.execute(statement)))

        # The cleanup is performed with the alternate schema URI
        store.commit()
        schema_store.execute("INSERT INTO test (foo) VALUES ('data')")
        schema_store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(schema_store.execute("SELECT * FROM test")))
Code example #31
File: testing.py Project: DamnWidget/mamba-storm
    def test_schema_uri(self):
        """
        It's possible to specify an alternate URI for applying the schema
        and cleaning up tables after a test.
        """
        schema_uri = "sqlite:///%s" % self.makeFile()
        self.databases[0]["schema-uri"] = schema_uri
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        schema_store = Store(create_database(schema_uri))

        # The schema was applied using the alternate schema URI
        statement = "SELECT name FROM sqlite_master WHERE name='patch'"
        self.assertEqual([], list(store.execute(statement)))
        self.assertEqual([("patch",)], list(schema_store.execute(statement)))

        # The cleanup is performed with the alternate schema URI
        store.commit()
        schema_store.execute("INSERT INTO test (foo) VALUES ('data')")
        schema_store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(schema_store.execute("SELECT * FROM test")))
Code example #32
File: testing.py Project: datnguyen0606/storm
class ZStormResourceManagerTest(TestHelper):

    def is_supported(self):
        return has_transaction and has_zope_component and has_testresources

    def setUp(self):
        super(ZStormResourceManagerTest, self).setUp()
        self._package_dir = self.makeDir()
        sys.path.append(self._package_dir)
        patch_dir = os.path.join(self._package_dir, "patch_package")
        os.mkdir(patch_dir)
        self.makeFile(path=os.path.join(patch_dir, "__init__.py"), content="")
        self.makeFile(path=os.path.join(patch_dir, "patch_1.py"),
                      content=PATCH)
        import patch_package
        create = ["CREATE TABLE test (foo TEXT UNIQUE, bar INT)"]
        drop = ["DROP TABLE test"]
        delete = ["DELETE FROM test"]
        uri = "sqlite:///%s" % self.makeFile()
        schema = ZSchema(create, drop, delete, patch_package)
        self.databases = [{"name": "test", "uri": uri, "schema": schema}]
        self.resource = ZStormResourceManager(self.databases)
        self.store = Store(create_database(uri))

    def tearDown(self):
        del sys.modules["patch_package"]
        sys.path.remove(self._package_dir)
        if "patch_1" in sys.modules:
            del sys.modules["patch_1"]
        super(ZStormResourceManagerTest, self).tearDown()

    def test_make(self):
        """
        L{ZStormResourceManager.make} returns a L{ZStorm} resource that can be
        used to get the registered L{Store}s.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo, bar FROM test")))

    def test_make_upgrade(self):
        """
        L{ZStormResourceManager.make} upgrades the schema if needed.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT bar FROM test")))

    def test_make_delete(self):
        """
        L{ZStormResourceManager.make} deletes the data from all tables to make
        sure that tests run against a clean database.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.execute("INSERT INTO test (foo) VALUES ('data')")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo FROM test")))

    def test_make_zstorm_overwritten(self):
        """
        L{ZStormResourceManager.make} registers its own ZStorm again if a test
        has registered a new ZStorm utility overwriting the resource one.
        """
        zstorm = self.resource.make([])
        provideUtility(ZStorm())
        self.resource.make([])
        self.assertIs(zstorm, getUtility(IZStorm))

    def test_clean_flush(self):
        """
        L{ZStormResourceManager.clean} tries to flush the stores to make sure
        that they are all in a consistent state.
        """
        class Test(object):
            __storm_table__ = "test"
            foo = Unicode()
            bar = Int(primary=True)

            def __init__(self, foo, bar):
                self.foo = foo
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(u"data", 1))
        store.add(Test(u"data", 2))
        self.assertRaises(IntegrityError, self.resource.clean, zstorm)

    def test_clean_delete(self):
        """
        L{ZStormResourceManager.clean} cleans the database tables from the data
        created by the tests.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))

    def test_clean_with_force_delete(self):
        """
        If L{ZStormResourceManager.force_delete} is C{True}, L{Schema.delete}
        is always invoked upon test cleanup.
        """
        zstorm = self.resource.make([])
        self.store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        self.store.commit()
        self.resource.force_delete = True
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))

    def test_wb_clean_clears_alive_cache_before_abort(self):
        """
        L{ZStormResourceManager.clean} clears the alive cache before
        aborting the transaction.
        """
        class Test(object):
            __storm_table__ = "test"
            bar = Int(primary=True)

            def __init__(self, bar):
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(1))
        store.add(Test(2))
        real_invalidate = store.invalidate

        def invalidate_proxy():
            self.assertEqual(0, len(store._alive.values()))
            real_invalidate()
        store.invalidate = invalidate_proxy

        self.resource.clean(zstorm)

    def test_schema_uri(self):
        """
        It's possible to specify an alternate URI for applying the schema
        and cleaning up tables after a test.
        """
        schema_uri = "sqlite:///%s" % self.makeFile()
        self.databases[0]["schema-uri"] = schema_uri
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        schema_store = Store(create_database(schema_uri))

        # The schema was applied using the alternate schema URI
        statement = "SELECT name FROM sqlite_master WHERE name='patch'"
        self.assertEqual([], list(store.execute(statement)))
        self.assertEqual([("patch",)], list(schema_store.execute(statement)))

        # The cleanup is performed with the alternate schema URI
        store.commit()
        schema_store.execute("INSERT INTO test (foo) VALUES ('data')")
        schema_store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(schema_store.execute("SELECT * FROM test")))

    def test_deprecated_database_format(self):
        """
        The old deprecated format of the 'database' constructor parameter is
        still supported.
        """
        import patch_package
        uri = "sqlite:///%s" % self.makeFile()
        schema = ZSchema([], [], [], patch_package)
        resource = ZStormResourceManager({"test": (uri, schema)})
        zstorm = resource.make([])
        store = zstorm.get("test")
        self.assertIsNot(None, store)
Code example #33
File: logtable.py Project: MetricsGrimoire/Bicho
class IssuesLog():

    def __init__(self, backend_name):
        self.backend_name = backend_name
        self.connect()
        self.create_db()

    def connect(self):
        opts = Config()

        self.database = create_database('mysql://' + opts.db_user_out + ':'
                                        + opts.db_password_out + '@'
                                        + opts.db_hostname_out + ':'
                                        + opts.db_port_out + '/'
                                        + opts.db_database_out)
        self.store = Store(self.database)

    def create_db(self):
        print("self.backend_name = %s" % (self.backend_name))
        if self.backend_is_bugzilla():
            self.store.execute(__sql_table_bugzilla__)
        elif self.backend_is_jira():
            self.store.execute(__sql_table_jira__)

    def copy_issue(self, db_ilog):
        """
        This method creates a copy of a DBBugzillaIssuesLog or
        DBJiraIssuesLog object
        """

        if self.backend_is_bugzilla():
            aux = DBBugzillaIssuesLog(db_ilog.issue, db_ilog.tracker_id)
            aux.issue_id = db_ilog.issue_id
            aux.type = db_ilog.type
            aux.summary = db_ilog.summary
            aux.description = db_ilog.description
            aux.status = db_ilog.status
            aux.resolution = db_ilog.resolution
            aux.priority = db_ilog.priority
            aux.submitted_by = db_ilog.submitted_by
            aux.date = db_ilog.date
            aux.assigned_to = db_ilog.assigned_to

            #aux = DBBugzillaIssuesLog (db_ilog.issue_id)
            aux.alias = db_ilog.alias
            aux.delta_ts = db_ilog.delta_ts
            aux.reporter_accessible = db_ilog.reporter_accessible
            aux.cclist_accessible = db_ilog.cclist_accessible
            aux.classification_id = db_ilog.classification_id
            aux.classification = db_ilog.classification
            aux.product = db_ilog.product
            aux.component = db_ilog.component
            aux.version = db_ilog.version
            aux.rep_platform = db_ilog.rep_platform
            aux.op_sys = db_ilog.op_sys
            aux.dup_id = db_ilog.dup_id
            aux.bug_file_loc = db_ilog.bug_file_loc
            aux.status_whiteboard = db_ilog.status_whiteboard
            aux.target_milestone = db_ilog.target_milestone
            aux.votes = db_ilog.votes
            aux.everconfirmed = db_ilog.everconfirmed
            aux.qa_contact = db_ilog.qa_contact
            aux.estimated_time = db_ilog.estimated_time
            aux.remaining_time = db_ilog.remaining_time
            aux.actual_time = db_ilog.actual_time
            aux.deadline = db_ilog.deadline
            aux.keywords = db_ilog.keywords
            aux.cc = db_ilog.cc
            aux.group_bugzilla = db_ilog.group_bugzilla
            aux.flag = db_ilog.flag
            return aux

        elif self.backend_is_jira():
            aux = DBJiraIssuesLog(db_ilog.issue, db_ilog.tracker_id)
            aux.issue_id = db_ilog.issue_id
            aux.type = db_ilog.type
            aux.summary = db_ilog.summary
            aux.description = db_ilog.description
            aux.status = db_ilog.status
            aux.resolution = db_ilog.resolution
            aux.priority = db_ilog.priority
            aux.submitted_by = db_ilog.submitted_by
            aux.date = db_ilog.date
            aux.assigned_to = db_ilog.assigned_to

            aux.link = db_ilog.link
            aux.component = db_ilog.component
            aux.version = db_ilog.version
            aux.issue_key = db_ilog.issue_key
            aux.environment = db_ilog.environment
            aux.project = db_ilog.project
            aux.project_key = db_ilog.project_key
            aux.security = db_ilog.security

            return aux

    def get_people_id(self, email, tracker_id):
        """
        Gets the id of a user
        """
        p = self.store.find(DBPeople, DBPeople.email == email).one()
        ##
        ## the code below was created ad-hoc for KDE solid
        ##
        try:
            return p.id
        except AttributeError:
            p = self.store.find(DBPeople, DBPeople.user_id == email).one()
            try:
                return p.id
            except AttributeError:
                # no person was found in People with the email above, so
                # we include it
                printdbg("Person not found. Inserted with email %s " % (email))
                dp = DBPeople(email, tracker_id)
                self.store.add(dp)
                self.store.commit()
                return dp.id

    def get_last_change_date(self):
        """
        This method gets the date of the last change included in the log table
        """
        if self.backend_is_bugzilla():
            result = self.store.find(DBBugzillaIssuesLog)
            aux = result.order_by(Desc(DBBugzillaIssuesLog.date))[:1]
            for entry in aux:
                return entry.date
        elif self.backend_is_jira():
            result = self.store.find(DBJiraIssuesLog)
            aux = result.order_by(Desc(DBJiraIssuesLog.date))[:1]
            for entry in aux:
                return entry.date
        return None

    def get_issues_changed_since(self, date):
        """
        This method fetches the issues changed since the given date
        """

        #SELECT DISTINCT(issues.id) FROM issues, changes
        #WHERE issues.id = changes.issue_id
        #AND (issues.submitted_on >= '2012-02-28 12:34:44'
        #    OR changes.changed_on >= '2012-02-28 12:34:44');

        result = self.store.find(DBIssue,
                                 DBChange.issue_id == DBIssue.id,
                                 Or(DBIssue.submitted_on > date,
                                    DBChange.changed_on > date )).group_by(DBIssue.id)

        return result

    def get_previous_state(self, issue_id):
        """
        This method returns a db_ilog object with the last row found in
        the log table
        """
        db_ilog = None
        if self.backend_is_jira():
            rows = self.store.find(DBJiraIssuesLog,
                                   DBJiraIssuesLog.issue_id == issue_id)
            lrow = rows.order_by(Desc(DBJiraIssuesLog.id))[:1]
            for aux in lrow:  # FIXME it only contains an element!
                db_ilog = DBJiraIssuesLog(aux.issue, aux.tracker_id)
                db_ilog.issue_id = aux.issue_id
                db_ilog.type = aux.type
                db_ilog.summary = aux.summary
                db_ilog.description = aux.description
                db_ilog.status = aux.status
                db_ilog.resolution = aux.resolution
                db_ilog.priority = aux.priority
                db_ilog.submitted_by = aux.submitted_by
                db_ilog.date = aux.date
                db_ilog.assigned_to = aux.assigned_to
                db_ilog.issue_key = aux.issue_key
                db_ilog.link = aux.link
                db_ilog.environment = aux.environment
                db_ilog.security = aux.security
                db_ilog.updated = aux.updated
                db_ilog.version = aux.version
                db_ilog.component = aux.component
                db_ilog.votes = aux.votes
                db_ilog.project = aux.project
                db_ilog.project_id = aux.project_id
                db_ilog.project_key = aux.project_key
        else:  # elif self.backend_is_bugzilla():
            rows = self.store.find(DBBugzillaIssuesLog,
                                   DBBugzillaIssuesLog.issue_id == issue_id)
            lrow = rows.order_by(Desc(DBBugzillaIssuesLog.id))[:1]
            for aux in lrow:  # FIXME it only contains an element!
                db_ilog = DBBugzillaIssuesLog(aux.issue, aux.tracker_id)
                db_ilog.issue_id = aux.issue_id
                db_ilog.type = aux.type
                db_ilog.summary = aux.summary
                db_ilog.description = aux.description
                db_ilog.status = aux.status
                db_ilog.resolution = aux.resolution
                db_ilog.priority = aux.priority
                db_ilog.submitted_by = aux.submitted_by
                db_ilog.date = aux.date
                db_ilog.assigned_to = aux.assigned_to
                db_ilog.alias = aux.alias
                db_ilog.delta_ts = aux.delta_ts
                db_ilog.reporter_accessible = aux.reporter_accessible
                db_ilog.cclist_accessible = aux.cclist_accessible
                db_ilog.classification_id = aux.classification_id
                db_ilog.classification = aux.classification
                db_ilog.product = aux.product
                db_ilog.component = aux.component
                db_ilog.version = aux.version
                db_ilog.rep_platform = aux.rep_platform
                db_ilog.op_sys = aux.op_sys
                db_ilog.dup_id = aux.dup_id
                db_ilog.bug_file_loc = aux.bug_file_loc
                db_ilog.status_whiteboard = aux.status_whiteboard
                db_ilog.target_milestone = aux.target_milestone
                db_ilog.votes = aux.votes
                db_ilog.everconfirmed = aux.everconfirmed
                db_ilog.qa_contact = aux.qa_contact
                db_ilog.estimated_time = aux.estimated_time
                db_ilog.remaining_time = aux.remaining_time
                db_ilog.actual_time = aux.actual_time
                db_ilog.deadline = aux.deadline
                db_ilog.keywords = aux.keywords
                db_ilog.cc = aux.cc
                db_ilog.group_bugzilla = aux.group_bugzilla
                db_ilog.flag = aux.flag

        return db_ilog

    def issue_is_new(self, issue_id):
        """
        This method returns True if the issue is not logged in the log table
        """
        if self.backend_is_jira():
            result = self.store.find(DBJiraIssuesLog,
                                     DBJiraIssuesLog.issue_id == issue_id)
        elif self.backend_is_bugzilla():
            result = self.store.find(DBBugzillaIssuesLog,
                                     DBBugzillaIssuesLog.issue_id == issue_id)
        return (result.count() == 0)

    def build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in
        order to get the initial state of the bug
        """
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes\
        where issue_id=%s" % (db_ilog.issue_id))

        for f in fields:
            value = self.store.execute("SELECT old_value FROM changes \
            WHERE issue_id=%s AND field=\"%s\" ORDER BY changed_on LIMIT 1"
                                  % (db_ilog.issue_id, f[0]))
            for v in value:
                if self.backend_is_bugzilla():
                    # Bugzilla section
                    #
                    if f[0] in bg_issues_links:
                        table_field = bg_issues_links[f[0]]
                        if table_field == 'summary':
                            db_ilog.summary = v[0]
                        elif table_field == 'priority':
                            db_ilog.priority = v[0]
                        elif table_field == 'type':
                            db_ilog.type = v[0]
                        elif table_field == 'assigned_to':
                            db_ilog.assigned_to = self.get_people_id(
                                v[0], self.get_tracker_id(db_ilog.issue_id))
                        elif table_field == 'status':
                            db_ilog.status = v[0]
                        elif table_field == 'resolution':
                            db_ilog.resolution = v[0]
                        elif table_field == 'alias':
                            db_ilog.alias = v[0]
                        elif table_field == 'reporter_accessible':
                            db_ilog.reporter_accessible = v[0]
                        elif table_field == 'cclist_accessible':
                            db_ilog.cclist_accessible = v[0]
                        elif table_field == 'product':
                            db_ilog.product = v[0]
                        elif table_field == 'component':
                            db_ilog.component = v[0]
                        elif table_field == 'version':
                            db_ilog.version = v[0]
                        elif table_field == 'rep_platform':
                            db_ilog.rep_platform = v[0]
                        elif table_field == 'op_sys':
                            db_ilog.op_sys = v[0]
                        elif table_field == 'bug_file_loc':
                            db_ilog.bug_file_loc = v[0]
                        elif table_field == 'status_whiteboard':
                            db_ilog.status_whiteboard = v[0]
                        elif table_field == 'target_milestone':
                            db_ilog.target_milestone = v[0]
                        elif table_field == 'votes':
                            db_ilog.votes = v[0]
                        elif table_field == 'everconfirmed':
                            db_ilog.everconfirmed = v[0]
                        elif table_field == 'qa_contact':
                            db_ilog.qa_contact = v[0]
                        elif table_field == 'keywords':
                            db_ilog.keywords = v[0]
                        elif table_field == 'cc':
                            db_ilog.cc = v[0]
                if self.backend_is_jira():
                    # Jira section
                    #
                    if f[0] in jira_issues_links:
                        table_field = jira_issues_links[f[0]]
                        if table_field == 'summary':
                            db_ilog.summary = v[0]
                        elif table_field == 'priority':
                            db_ilog.priority = v[0]
                        elif table_field == 'type':
                            db_ilog.type = v[0]
                        elif table_field == 'assigned_to':
                            db_ilog.assigned_to = self.get_people_id(
                                v[0], self.get_tracker_id(db_ilog.issue_id))
                        elif table_field == 'status':
                            db_ilog.status = v[0]
                        elif table_field == 'resolution':
                            db_ilog.resolution = v[0]
                        elif table_field == 'link':
                            db_ilog.link = v[0]
                        elif table_field == 'environment':
                            db_ilog.environment = v[0]
                        elif table_field == 'component':
                            db_ilog.component = v[0]
                        elif table_field == 'description':
                            db_ilog.description = v[0]
                        elif table_field == 'security':
                            db_ilog.security = v[0]

        return db_ilog

    def backend_is_bugzilla(self):
        return self.backend_name == 'bg'

    def backend_is_jira(self):
        return self.backend_name == 'jira'

    def get_last_values(self, issue_row):
        i = issue_row
        db_ilog = None
        if self.backend_is_bugzilla():
            db_ilog_bugzilla = DBBugzillaIssuesLog(i.issue, i.tracker_id)
            db_ilog_bugzilla.issue_id = i.id
            db_ilog_bugzilla.type = i.type
            db_ilog_bugzilla.summary = i.summary
            db_ilog_bugzilla.description = i.description
            db_ilog_bugzilla.status = i.status
            db_ilog_bugzilla.resolution = i.resolution
            db_ilog_bugzilla.priority = i.priority
            db_ilog_bugzilla.submitted_by = i.submitted_by
            db_ilog_bugzilla.date = i.submitted_on
            db_ilog_bugzilla.assigned_to = i.assigned_to

            ib = self.store.find(DBBugzillaIssueExt, \
                                 DBBugzillaIssueExt.issue_id == db_ilog_bugzilla.issue_id).one()

            ####
            db_ilog_bugzilla.alias = ib.alias
            db_ilog_bugzilla.delta_ts = ib.delta_ts
            db_ilog_bugzilla.reporter_accessible = ib.reporter_accessible
            db_ilog_bugzilla.cclist_accessible = ib.cclist_accessible
            db_ilog_bugzilla.classification_id = ib.classification_id
            db_ilog_bugzilla.classification = ib.classification
            db_ilog_bugzilla.product = ib.product
            db_ilog_bugzilla.component = ib.component
            db_ilog_bugzilla.version = ib.version
            db_ilog_bugzilla.rep_platform = ib.rep_platform
            db_ilog_bugzilla.op_sys = ib.op_sys
            db_ilog_bugzilla.dup_id = ib.dup_id
            db_ilog_bugzilla.bug_file_loc = ib.bug_file_loc
            db_ilog_bugzilla.status_whiteboard = ib.status_whiteboard
            db_ilog_bugzilla.target_milestone = ib.target_milestone
            db_ilog_bugzilla.votes = ib.votes
            db_ilog_bugzilla.everconfirmed = ib.everconfirmed
            db_ilog_bugzilla.qa_contact = ib.qa_contact
            db_ilog_bugzilla.estimated_time = ib.estimated_time
            db_ilog_bugzilla.remaining_time = ib.remaining_time
            db_ilog_bugzilla.actual_time = ib.actual_time
            db_ilog_bugzilla.deadline = ib.deadline
            db_ilog_bugzilla.keywords = ib.keywords
            db_ilog_bugzilla.cc = ib.cc
            db_ilog_bugzilla.group_bugzilla = ib.group_bugzilla
            db_ilog_bugzilla.flag = ib.flag
            db_ilog = db_ilog_bugzilla

        elif self.backend_is_jira():
            db_ilog = DBJiraIssuesLog(i.issue, i.tracker_id)
            db_ilog.issue_id = i.id
            db_ilog.type = i.type
            db_ilog.summary = i.summary
            db_ilog.description = i.description
            db_ilog.status = i.status
            db_ilog.resolution = i.resolution
            db_ilog.priority = i.priority
            db_ilog.submitted_by = i.submitted_by
            db_ilog.date = i.submitted_on
            db_ilog.assigned_to = i.assigned_to

            ib = self.store.find(DBJiraIssueExt, \
                                 DBJiraIssueExt.issue_id == db_ilog.issue_id).one()

            db_ilog.issue_key = ib.issue_key
            db_ilog.link = ib.link
            db_ilog.environment = ib.environment
            db_ilog.security = ib.security
            db_ilog.updated = ib.updated
            db_ilog.version = ib.version
            db_ilog.component = ib.component
            db_ilog.votes = ib.votes
            db_ilog.project = ib.project
            db_ilog.project_id = ib.project_id
            db_ilog.project_key = ib.project_key

        return db_ilog

    def insert_new_bugs_created(self, date_from, date_to):
        """
        This method inserts an entry with the data of the creation time
        """
        if (not date_from) and (not date_to):
            issues = self.store.find(DBIssue)
        elif not date_from:
            issues = self.store.find(DBIssue, DBIssue.submitted_on < date_to)
        elif not date_to:
            issues = self.store.find(DBIssue, DBIssue.submitted_on > date_from)
        else:
            issues = self.store.find(DBIssue,
                                     And(DBIssue.submitted_on <= date_to,
                                         DBIssue.submitted_on > date_from))

        issues = issues.order_by(Asc(DBIssue.submitted_on))
        ## we store the initial data for each bug found
        for i in issues:
            db_ilog = self.get_last_values(i)  # from issues and change tables
            db_ilog = self.build_initial_state(db_ilog)
            self.store.add(db_ilog)
            printdbg("Issue #%s created at %s - date_from = %s - date_to = %s"
                     % (db_ilog.issue, db_ilog.date, date_from, date_to))

    def get_tracker_id(self, issue_id):
        """
        Returns tracker id from issues
        """
        result = self.store.find(DBIssue.tracker_id,
                                 DBIssue.id == issue_id).one()
        return result

    def run(self):

        last_change_date = self.get_last_change_date()
        printdbg("Last change logged at %s" % (last_change_date))

        date_from = None
        date_to = None

        if last_change_date:
            changes = self.store.find(DBChange,
                                      DBChange.changed_on > last_change_date)
            date_from = last_change_date
        else:
            changes = self.store.find(DBChange)

        changes = changes.order_by(Asc(DBChange.changed_on))

        for ch in changes:
            # insert creation if needed
            date_to = ch.changed_on
            self.insert_new_bugs_created(date_from, date_to)
            date_from = date_to

            field = ch.field
            new_value = ch.new_value
            changed_by = ch.changed_by
            date = ch.changed_on
            issue_id = ch.issue_id

            #print("field = %s, new_value = %s, changed_by = %s, date = %s"
            #      % (field, new_value, str(changed_by), str(date)))

            db_ilog = self.get_previous_state(issue_id)

            printdbg("Issue #%s modified at %s" %
                     (db_ilog.issue, date))

            if self.backend_is_bugzilla():
                # Bugzilla section
                #
                #
                if (field in bg_issues_links):
                    table_field = bg_issues_links[field]
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    if table_field == 'summary':
                        db_ilog.summary = new_value
                    elif table_field == 'priority':
                        db_ilog.priority = new_value
                    elif table_field == 'type':
                        db_ilog.type = new_value
                    elif table_field == 'assigned_to':
                        db_ilog.assigned_to = self.get_people_id(
                            new_value, self.get_tracker_id(db_ilog.issue_id))
                    elif table_field == 'status':
                        db_ilog.status = new_value
                    elif table_field == 'resolution':
                        db_ilog.resolution = new_value
                    elif table_field == 'alias':
                        db_ilog.alias = new_value
                    elif table_field == 'reporter_accessible':
                        db_ilog.reporter_accessible = new_value
                    elif table_field == 'cclist_accessible':
                        db_ilog.cclist_accessible = new_value
                    elif table_field == 'product':
                        db_ilog.product = new_value
                    elif table_field == 'component':
                        db_ilog.component = new_value
                    elif table_field == 'version':
                        db_ilog.version = new_value
                    elif table_field == 'rep_platform':
                        db_ilog.rep_platform = new_value
                    elif table_field == 'op_sys':
                        db_ilog.op_sys = new_value
                    elif table_field == 'bug_file_loc':
                        db_ilog.bug_file_loc = new_value
                    elif table_field == 'status_whiteboard':
                        db_ilog.status_whiteboard = new_value
                    elif table_field == 'target_milestone':
                        db_ilog.target_milestone = new_value
                    elif table_field == 'votes':
                        db_ilog.votes = new_value
                    elif table_field == 'everconfirmed':
                        db_ilog.everconfirmed = new_value
                    elif table_field == 'qa_contact':
                        db_ilog.qa_contact = new_value
                    elif table_field == 'keywords':
                        db_ilog.keywords = new_value
                    elif table_field == 'cc':
                        db_ilog.cc = new_value

                    try:
                        self.store.add(db_ilog)
                    except:
                        traceback.print_exc()

            elif self.backend_is_jira():
                # Jira section
                #
                #

                if (field in jira_issues_links):
                    table_field = jira_issues_links[field]
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    if table_field == 'summary':
                        db_ilog.summary = new_value
                    elif table_field == 'priority':
                        db_ilog.priority = new_value
                    elif table_field == 'type':
                        db_ilog.type = new_value
                    elif table_field == 'assigned_to':
                        db_ilog.assigned_to = self.get_people_id(
                            new_value, self.get_tracker_id(db_ilog.issue_id))
                    elif table_field == 'status':
                        db_ilog.status = new_value
                    elif table_field == 'resolution':
                        db_ilog.resolution = new_value
                    elif table_field == 'description':
                        db_ilog.description = new_value
                    elif table_field == 'link':
                        db_ilog.link = new_value
                    elif table_field == 'component':
                        db_ilog.component = new_value
                    elif table_field == 'version':
                        db_ilog.version = new_value
                    elif table_field == 'security':
                        db_ilog.security = new_value
                    try:
                        self.store.add(db_ilog)
                    except:
                        traceback.print_exc()

            # if there are changes, it stores the last bugs after the last
            # change. If there are no changes, insert all the created bugs
        self.insert_new_bugs_created(date_from, None)
        self.store.commit()
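
In both build_initial_state and run, the long elif ladders mostly assign a value to the attribute whose name is given by the field mapping; a hedged sketch of a table-driven alternative (a hypothetical helper, keeping the assigned_to lookup special-cased):

    def _apply_field(self, db_ilog, links, field, value):
        # Set the attribute named by the mapping, e.g.
        # links['Summary'] == 'summary' -> db_ilog.summary = value.
        attr = links.get(field)
        if attr == 'assigned_to':
            db_ilog.assigned_to = self.get_people_id(
                value, self.get_tracker_id(db_ilog.issue_id))
        elif attr is not None:
            setattr(db_ilog, attr, value)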
Code example #34
class TableReplacer:
    """
    This is the base class used by every Updater
    """

    def __init__(self, old_db_file, new_db_file, start_ver):

        from globaleaks.db.update_5_6 import User_version_5, Comment_version_5, Node_version_5
        from globaleaks.db.update_6_7 import Node_version_6, Context_version_6
        from globaleaks.db.update_7_8 import Node_version_7, Notification_version_7, Context_version_7, \
            Receiver_version_7, InternalFile_version_7
        from globaleaks.db.update_8_9 import Context_version_8, Receiver_version_8, Notification_version_8
        from globaleaks.db.update_9_10 import Node_version_9, ApplicationData_version_10, \
            Receiver_version_9, User_version_9
        from globaleaks.db.update_10_11 import InternalTip_version_10, InternalFile_version_10
        from globaleaks.db.update_11_12 import Node_version_11, ApplicationData_version_11, Context_version_11

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        self.table_history = {
            'Node' : [ Node_version_5, Node_version_6, Node_version_7, Node_version_9, None, Node_version_11, None, models.Node],
            'User' : [ User_version_5, User_version_9, None, None, None, models.User, None, None],
            'Context' : [ Context_version_6, None, Context_version_7, Context_version_8, Context_version_11, None, None, models.Context],
            'Receiver': [ Receiver_version_7, None, None, Receiver_version_8, Receiver_version_9, models.Receiver, None, None],
            'ReceiverFile' : [ models.ReceiverFile, None, None, None, None, None, None, None],
            'Notification': [ Notification_version_7, None, None, Notification_version_8, models.Notification, None, None, None],
            'Comment': [ Comment_version_5, models.Comment, None, None, None, None, None, None],
            'InternalTip' : [ InternalTip_version_10, None, None, None, None, None, models.InternalTip, None],
            'InternalFile' : [ InternalFile_version_7, None, None, InternalFile_version_10, None, None, models.InternalFile, None],
            'WhistleblowerTip' : [ models.WhistleblowerTip, None, None, None, None, None, None, None],
            'ReceiverTip' : [ models.ReceiverTip, None, None, None, None, None, None , None],
            'ReceiverInternalTip' : [ models.ReceiverInternalTip, None, None, None, None, None, None, None],
            'ReceiverContext' : [ models.ReceiverContext, None, None, None, None, None, None, None],
            'Message' : [ models.Message, None, None, None, None, None, None, None],
            'Stats' : [models.Stats, None, None, None, None, None, None, None],
            'ApplicationData' : [ApplicationData_version_10, None, None, None, None, None, None, models.ApplicationData],
        }

        for k, v in self.table_history.iteritems():
            # +1 because counting starts from 0,
            # -5 because releases 0, 1, 2, 3 and 4 are not supported anymore
            assert len(v) == (DATABASE_VERSION + 1 - 5), \
                "I'm expecting a table with %d statuses (%s)" % (DATABASE_VERSION + 1 - 5, k)

        print "%s Opening old DB: %s" % (self.debug_info, old_db_file)
        old_database = create_database("sqlite:%s" % self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database("sqlite:%s" % new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:

            print "%s Acquire SQL schema %s" % (self.debug_info, GLSetting.db_schema_file)

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                print "Unable to access %s" % GLSetting.db_schema_file
                raise Exception("Unable to access db schema file")

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f.readlines()).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query+';')
                    except OperationalError:
                        print "OperationalError in [%s]" % create_query

            self.store_new.commit()
            return
            # return here; the intermediate migrating versions are handled below:

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query+';')
            except OperationalError as excep:
                print "%s OperationalError in [%s]" % (self.debug_info, create_query)
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        pass

    def epilogue(self):
        pass

    def get_right_model(self, table_name, version):

        table_index = (version - 5)

        if table_name not in self.table_history:
            print "Not implemented usage of get_right_model %s (%s %d)" % (
                __file__, table_name, self.start_ver)
            raise NotImplementedError

        assert version <= DATABASE_VERSION, "wrong developer brainsync"

        if self.table_history[table_name][table_index]:
            # print "Immediate return %s = %s at version %d" % \
            #       ( table_name, self.table_history[table_name][table_index], version )
            return self.table_history[table_name][table_index]

        # otherwise it is None, and we have to walk back to the previous valid version
        #
        # print "Requested version %d of %s needs to be collected from the past" %\
        #       (version, table_name)

        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                # print ".. returning %s = %s" %\
                #       ( table_name, self.table_history[table_name][table_index] )
                return self.table_history[table_name][table_index]
            table_index -= 1

        # This should never happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name:
        @param version:
        @return:
            The SQL right for the stuff we've
        """

        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):

        print "%s default %s migration assistant: #%d" % (
            self.debug_info, table_name,
            self.store_old.find(self.get_right_model(table_name, self.start_ver)).count())

        old_objects = self.store_old.find(self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # walk Storm's column map and copy every column value across
            for k, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        print "%s default %s migration assistant" % (self.debug_info, table_name)

        old_obj = self.store_old.find(self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # walk Storm's column map and copy every column value across
        for k, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        has been created between 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        has been created between 9 and 10!
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        has been created between 9 and 10!
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("ApplicationData")
Code Example #35
File: logtable.py Project: sferdi/lucenebug
class IssuesLog():
    def __init__(self, backend_name):
        self.backend_name = backend_name
        self.connect()
        self.create_db()

    def connect(self):
        opts = Config()

        self.database = create_database('mysql://' + opts.db_user_out + ':' +
                                        opts.db_password_out + '@' +
                                        opts.db_hostname_out + ':' +
                                        opts.db_port_out + '/' +
                                        opts.db_database_out)
        self.store = Store(self.database)

    def create_db(self):
        print("self.backend_name = %s" % (self.backend_name))
        if self.backend_is_bugzilla():
            self.store.execute(__sql_table_bugzilla__)
        elif self.backend_is_jira():
            self.store.execute(__sql_table_jira__)

    def copy_issue(self, db_ilog):
        """
        This method creates a copy of a DBBugzillaIssuesLog/DBJiraIssuesLog object
        """

        if self.backend_is_bugzilla():
            aux = DBBugzillaIssuesLog(db_ilog.issue, db_ilog.tracker_id)
            aux.issue_id = db_ilog.issue_id
            aux.type = db_ilog.type
            aux.summary = db_ilog.summary
            aux.description = db_ilog.description
            aux.status = db_ilog.status
            aux.resolution = db_ilog.resolution
            aux.priority = db_ilog.priority
            aux.submitted_by = db_ilog.submitted_by
            aux.date = db_ilog.date
            aux.assigned_to = db_ilog.assigned_to

            #aux = DBBugzillaIssuesLog (db_ilog.issue_id)
            aux.alias = db_ilog.alias
            aux.delta_ts = db_ilog.delta_ts
            aux.reporter_accessible = db_ilog.reporter_accessible
            aux.cclist_accessible = db_ilog.cclist_accessible
            aux.classification_id = db_ilog.classification_id
            aux.classification = db_ilog.classification
            aux.product = db_ilog.product
            aux.component = db_ilog.component
            aux.version = db_ilog.version
            aux.rep_platform = db_ilog.rep_platform
            aux.op_sys = db_ilog.op_sys
            aux.dup_id = db_ilog.dup_id
            aux.bug_file_loc = db_ilog.bug_file_loc
            aux.status_whiteboard = db_ilog.status_whiteboard
            aux.target_milestone = db_ilog.target_milestone
            aux.votes = db_ilog.votes
            aux.everconfirmed = db_ilog.everconfirmed
            aux.qa_contact = db_ilog.qa_contact
            aux.estimated_time = db_ilog.estimated_time
            aux.remaining_time = db_ilog.remaining_time
            aux.actual_time = db_ilog.actual_time
            aux.deadline = db_ilog.deadline
            aux.keywords = db_ilog.keywords
            aux.cc = db_ilog.cc
            aux.group_bugzilla = db_ilog.group_bugzilla
            aux.flag = db_ilog.flag
            return aux

        elif self.backend_is_jira():
            aux = DBJiraIssuesLog(db_ilog.issue, db_ilog.tracker_id)
            aux.issue_id = db_ilog.issue_id
            aux.type = db_ilog.type
            aux.summary = db_ilog.summary
            aux.description = db_ilog.description
            aux.status = db_ilog.status
            aux.resolution = db_ilog.resolution
            aux.priority = db_ilog.priority
            aux.submitted_by = db_ilog.submitted_by
            aux.date = db_ilog.date
            aux.assigned_to = db_ilog.assigned_to

            aux.link = db_ilog.link
            aux.component = db_ilog.component
            aux.version = db_ilog.version
            aux.issue_key = db_ilog.issue_key
            aux.environment = db_ilog.environment
            aux.project = db_ilog.project
            aux.project_key = db_ilog.project_key
            aux.security = db_ilog.security

            return aux

    def get_people_id(self, email, tracker_id):
        """
        Gets the id of a user
        """
        p = self.store.find(DBPeople, DBPeople.email == email).one()
        ##
        ## the code below was created ad-hoc for KDE solid
        ##
        try:
            return p.id
        except AttributeError:
            p = self.store.find(DBPeople, DBPeople.user_id == email).one()
            try:
                return p.id
            except AttributeError:
                # no person was found in People with the email above, so
                # we include it
                printdbg("Person not found. Inserted with email %s " % (email))
                dp = DBPeople(email, tracker_id)
                self.store.add(dp)
                self.store.commit()
                return dp.id

    def get_last_change_date(self):
        """
        This method gets the date of the last change included in the log table
        """
        if self.backend_is_bugzilla():
            result = self.store.find(DBBugzillaIssuesLog)
            aux = result.order_by(Desc(DBBugzillaIssuesLog.date))[:1]
            for entry in aux:
                return entry.date
        elif self.backend_is_jira():
            result = self.store.find(DBJiraIssuesLog)
            aux = result.order_by(Desc(DBJiraIssuesLog.date))[:1]
            for entry in aux:
                return entry.date
        return None

    def get_issues_changed_since(self, date):
        """
        This method fetches the issues changed since the given date
        """

        #SELECT DISTINCT(issues.id) FROM issues, changes
        #WHERE issues.id = changes.issue_id
        #AND (issues.submitted_on >= '2012-02-28 12:34:44'
        #    OR changes.changed_on >= '2012-02-28 12:34:44');

        result = self.store.find(
            DBIssue, DBChange.issue_id == DBIssue.id,
            Or(DBIssue.submitted_on > date,
               DBChange.changed_on > date)).group_by(DBIssue.id)

        return result

    def get_previous_state(self, issue_id):
        """
        This method returns a db_ilog object with the last row found in
        the log table
        """
        db_ilog = None
        if self.backend_is_jira():
            rows = self.store.find(DBJiraIssuesLog,
                                   DBJiraIssuesLog.issue_id == issue_id)
            lrow = rows.order_by(Desc(DBJiraIssuesLog.id))[:1]
            for aux in lrow:  # FIXME it contains only one element!
                db_ilog = DBJiraIssuesLog(aux.issue, aux.tracker_id)
                db_ilog.issue_id = aux.issue_id
                db_ilog.type = aux.type
                db_ilog.summary = aux.summary
                db_ilog.description = aux.description
                db_ilog.status = aux.status
                db_ilog.resolution = aux.resolution
                db_ilog.priority = aux.priority
                db_ilog.submitted_by = aux.submitted_by
                db_ilog.date = aux.date
                db_ilog.assigned_to = aux.assigned_to
                db_ilog.issue_key = aux.issue_key
                db_ilog.link = aux.link
                db_ilog.environment = aux.environment
                db_ilog.security = aux.security
                db_ilog.updated = aux.updated
                db_ilog.version = aux.version
                db_ilog.component = aux.component
                db_ilog.votes = aux.votes
                db_ilog.project = aux.project
                db_ilog.project_id = aux.project_id
                db_ilog.project_key = aux.project_key
        else:  # elif self.backend_is_bugzilla():
            rows = self.store.find(DBBugzillaIssuesLog,
                                   DBBugzillaIssuesLog.issue_id == issue_id)
            lrow = rows.order_by(Desc(DBBugzillaIssuesLog.id))[:1]
            for aux in lrow:  # FIXME it contains only one element!
                db_ilog = DBBugzillaIssuesLog(aux.issue, aux.tracker_id)
                db_ilog.issue_id = aux.issue_id
                db_ilog.type = aux.type
                db_ilog.summary = aux.summary
                db_ilog.description = aux.description
                db_ilog.status = aux.status
                db_ilog.resolution = aux.resolution
                db_ilog.priority = aux.priority
                db_ilog.submitted_by = aux.submitted_by
                db_ilog.date = aux.date
                db_ilog.assigned_to = aux.assigned_to
                db_ilog.alias = aux.alias
                db_ilog.delta_ts = aux.delta_ts
                db_ilog.reporter_accessible = aux.reporter_accessible
                db_ilog.cclist_accessible = aux.cclist_accessible
                db_ilog.classification_id = aux.classification_id
                db_ilog.classification = aux.classification
                db_ilog.product = aux.product
                db_ilog.component = aux.component
                db_ilog.version = aux.version
                db_ilog.rep_platform = aux.rep_platform
                db_ilog.op_sys = aux.op_sys
                db_ilog.dup_id = aux.dup_id
                db_ilog.bug_file_loc = aux.bug_file_loc
                db_ilog.status_whiteboard = aux.status_whiteboard
                db_ilog.target_milestone = aux.target_milestone
                db_ilog.votes = aux.votes
                db_ilog.everconfirmed = aux.everconfirmed
                db_ilog.qa_contact = aux.qa_contact
                db_ilog.estimated_time = aux.estimated_time
                db_ilog.remaining_time = aux.remaining_time
                db_ilog.actual_time = aux.actual_time
                db_ilog.deadline = aux.deadline
                db_ilog.keywords = aux.keywords
                db_ilog.cc = aux.cc
                db_ilog.group_bugzilla = aux.group_bugzilla
                db_ilog.flag = aux.flag

        return db_ilog

    def issue_is_new(self, issue_id):
        """
        This method returns True if the issue is not logged in the log table
        """
        if self.backend_is_jira():
            result = self.store.find(DBJiraIssuesLog,
                                     DBJiraIssuesLog.issue_id == issue_id)
        elif self.backend_is_bugzilla():
            result = self.store.find(DBBugzillaIssuesLog,
                                     DBBugzillaIssuesLog.issue_id == issue_id)
        return (result.count() == 0)

    def build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in
        order to get the initial state of the bug
        """
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes\
        where issue_id=%s" % (db_ilog.issue_id))

        for f in fields:
            value = self.store.execute("SELECT old_value FROM changes \
            WHERE issue_id=%s AND field=\"%s\" ORDER BY changed_on LIMIT 1" %
                                       (db_ilog.issue_id, f[0]))
            for v in value:
                if self.backend_is_bugzilla():
                    # Bugzilla section
                    #
                    if f[0] in bg_issues_links:
                        table_field = bg_issues_links[f[0]]
                        if table_field == 'summary':
                            db_ilog.summary = v[0]
                        elif table_field == 'priority':
                            db_ilog.priority = v[0]
                        elif table_field == 'type':
                            db_ilog.type = v[0]
                        elif table_field == 'assigned_to':
                            db_ilog.assigned_to = self.get_people_id(
                                v[0], self.get_tracker_id(db_ilog.issue_id))
                        elif table_field == 'status':
                            db_ilog.status = v[0]
                        elif table_field == 'resolution':
                            db_ilog.resolution = v[0]
                        elif table_field == 'alias':
                            db_ilog.alias = v[0]
                        elif table_field == 'reporter_accessible':
                            db_ilog.reporter_accessible = v[0]
                        elif table_field == 'cclist_accessible':
                            db_ilog.cclist_accessible = v[0]
                        elif table_field == 'product':
                            db_ilog.product = v[0]
                        elif table_field == 'component':
                            db_ilog.component = v[0]
                        elif table_field == 'version':
                            db_ilog.version = v[0]
                        elif table_field == 'rep_platform':
                            db_ilog.rep_platform = v[0]
                        elif table_field == 'op_sys':
                            db_ilog.op_sys = v[0]
                        elif table_field == 'bug_file_loc':
                            db_ilog.bug_file_loc = v[0]
                        elif table_field == 'status_whiteboard':
                            db_ilog.status_whiteboard = v[0]
                        elif table_field == 'target_milestone':
                            db_ilog.target_milestone = v[0]
                        elif table_field == 'votes':
                            db_ilog.votes = v[0]
                        elif table_field == 'everconfirmed':
                            db_ilog.everconfirmed = v[0]
                        elif table_field == 'qa_contact':
                            db_ilog.qa_contact = v[0]
                        elif table_field == 'keywords':
                            db_ilog.keywords = v[0]
                        elif table_field == 'cc':
                            db_ilog.cc = v[0]
                if self.backend_is_jira():
                    # Jira section
                    #
                    if f[0] in jira_issues_links:
                        table_field = jira_issues_links[f[0]]
                        if table_field == 'summary':
                            db_ilog.summary = v[0]
                        elif table_field == 'priority':
                            db_ilog.priority = v[0]
                        elif table_field == 'type':
                            db_ilog.type = v[0]
                        elif table_field == 'assigned_to':
                            db_ilog.assigned_to = self.get_people_id(
                                v[0], self.get_tracker_id(db_ilog.issue_id))
                        elif table_field == 'status':
                            db_ilog.status = v[0]
                        elif table_field == 'resolution':
                            db_ilog.resolution = v[0]
                        elif table_field == 'link':
                            db_ilog.link = v[0]
                        elif table_field == 'environment':
                            db_ilog.environment = v[0]
                        elif table_field == 'component':
                            db_ilog.component = v[0]
                        elif table_field == 'description':
                            db_ilog.description = v[0]
                        elif table_field == 'security':
                            db_ilog.security = v[0]

        return db_ilog

    def backend_is_bugzilla(self):
        return self.backend_name == 'bg'

    def backend_is_jira(self):
        return self.backend_name == 'jira'

    def get_last_values(self, issue_row):
        i = issue_row
        db_ilog = None
        if self.backend_is_bugzilla():
            db_ilog_bugzilla = DBBugzillaIssuesLog(i.issue, i.tracker_id)
            db_ilog_bugzilla.issue_id = i.id
            db_ilog_bugzilla.type = i.type
            db_ilog_bugzilla.summary = i.summary
            db_ilog_bugzilla.description = i.description
            db_ilog_bugzilla.status = i.status
            db_ilog_bugzilla.resolution = i.resolution
            db_ilog_bugzilla.priority = i.priority
            db_ilog_bugzilla.submitted_by = i.submitted_by
            db_ilog_bugzilla.date = i.submitted_on
            db_ilog_bugzilla.assigned_to = i.assigned_to

            ib = self.store.find(DBBugzillaIssueExt, \
                                 DBBugzillaIssueExt.issue_id == db_ilog_bugzilla.issue_id).one()

            # extended Bugzilla-only fields come from DBBugzillaIssueExt
            db_ilog_bugzilla.alias = ib.alias
            db_ilog_bugzilla.delta_ts = ib.delta_ts
            db_ilog_bugzilla.reporter_accessible = ib.reporter_accessible
            db_ilog_bugzilla.cclist_accessible = ib.cclist_accessible
            db_ilog_bugzilla.classification_id = ib.classification_id
            db_ilog_bugzilla.classification = ib.classification
            db_ilog_bugzilla.product = ib.product
            db_ilog_bugzilla.component = ib.component
            db_ilog_bugzilla.version = ib.version
            db_ilog_bugzilla.rep_platform = ib.rep_platform
            db_ilog_bugzilla.op_sys = ib.op_sys
            db_ilog_bugzilla.dup_id = ib.dup_id
            db_ilog_bugzilla.bug_file_loc = ib.bug_file_loc
            db_ilog_bugzilla.status_whiteboard = ib.status_whiteboard
            db_ilog_bugzilla.target_milestone = ib.target_milestone
            db_ilog_bugzilla.votes = ib.votes
            db_ilog_bugzilla.everconfirmed = ib.everconfirmed
            db_ilog_bugzilla.qa_contact = ib.qa_contact
            db_ilog_bugzilla.estimated_time = ib.estimated_time
            db_ilog_bugzilla.remaining_time = ib.remaining_time
            db_ilog_bugzilla.actual_time = ib.actual_time
            db_ilog_bugzilla.deadline = ib.deadline
            db_ilog_bugzilla.keywords = ib.keywords
            db_ilog_bugzilla.cc = ib.cc
            db_ilog_bugzilla.group_bugzilla = ib.group_bugzilla
            db_ilog_bugzilla.flag = ib.flag
            db_ilog = db_ilog_bugzilla

        elif self.backend_is_jira():
            db_ilog = DBJiraIssuesLog(i.issue, i.tracker_id)
            db_ilog.issue_id = i.id
            db_ilog.type = i.type
            db_ilog.summary = i.summary
            db_ilog.description = i.description
            db_ilog.status = i.status
            db_ilog.resolution = i.resolution
            db_ilog.priority = i.priority
            db_ilog.submitted_by = i.submitted_by
            db_ilog.date = i.submitted_on
            db_ilog.assigned_to = i.assigned_to

            ib = self.store.find(DBJiraIssueExt, \
                                 DBJiraIssueExt.issue_id == db_ilog.issue_id).one()

            db_ilog.issue_key = ib.issue_key
            db_ilog.link = ib.link
            db_ilog.environment = ib.environment
            db_ilog.security = ib.security
            db_ilog.updated = ib.updated
            db_ilog.version = ib.version
            db_ilog.component = ib.component
            db_ilog.votes = ib.votes
            db_ilog.project = ib.project
            db_ilog.project_id = ib.project_id
            db_ilog.project_key = ib.project_key

        return db_ilog

    def insert_new_bugs_created(self, date_from, date_to):
        """
        This method inserts an entry with the data of the creation time
        """
        if (not date_from) and (not date_to):
            issues = self.store.find(DBIssue)
        elif not date_from:
            issues = self.store.find(DBIssue, DBIssue.submitted_on < date_to)
        elif not date_to:
            issues = self.store.find(DBIssue, DBIssue.submitted_on > date_from)
        else:
            issues = self.store.find(
                DBIssue,
                And(DBIssue.submitted_on <= date_to,
                    DBIssue.submitted_on > date_from))

        issues = issues.order_by(Asc(DBIssue.submitted_on))
        ## we store the initial data for each bug found
        for i in issues:
            db_ilog = self.get_last_values(i)  # from issues and change tables
            db_ilog = self.build_initial_state(db_ilog)
            self.store.add(db_ilog)
            printdbg(
                "Issue #%s created at %s - date_from = %s - date_to = %s" %
                (db_ilog.issue, db_ilog.date, date_from, date_to))

    def get_tracker_id(self, issue_id):
        """
        Returns tracker id from issues
        """
        result = self.store.find(DBIssue.tracker_id,
                                 DBIssue.id == issue_id).one()
        return result

    def run(self):

        last_change_date = self.get_last_change_date()
        printdbg("Last change logged at %s" % (last_change_date))

        date_from = None
        date_to = None

        if last_change_date:
            changes = self.store.find(DBChange,
                                      DBChange.changed_on > last_change_date)
            date_from = last_change_date
        else:
            changes = self.store.find(DBChange)

        changes = changes.order_by(Asc(DBChange.changed_on))

        for ch in changes:
            # insert creation if needed
            date_to = ch.changed_on
            self.insert_new_bugs_created(date_from, date_to)
            date_from = date_to

            field = ch.field
            new_value = ch.new_value
            changed_by = ch.changed_by
            date = ch.changed_on
            issue_id = ch.issue_id

            #print("field = %s, new_value = %s, changed_by = %s, date = %s"
            #      % (field, new_value, str(changed_by), str(date)))

            db_ilog = self.get_previous_state(issue_id)

            printdbg("Issue #%s modified at %s" % (db_ilog.issue, date))

            if self.backend_is_bugzilla():
                # Bugzilla section
                #
                #
                if (field in bg_issues_links):
                    table_field = bg_issues_links[field]
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    if table_field == 'summary':
                        db_ilog.summary = new_value
                    elif table_field == 'priority':
                        db_ilog.priority = new_value
                    elif table_field == 'type':
                        db_ilog.type = new_value
                    elif table_field == 'assigned_to':
                        db_ilog.assigned_to = self.get_people_id(
                            new_value, self.get_tracker_id(db_ilog.issue_id))
                    elif table_field == 'status':
                        db_ilog.status = new_value
                    elif table_field == 'resolution':
                        db_ilog.resolution = new_value
                    elif table_field == 'alias':
                        db_ilog.alias = new_value
                    elif table_field == 'reporter_accessible':
                        db_ilog.reporter_accessible = new_value
                    elif table_field == 'cclist_accessible':
                        db_ilog.cclist_accessible = new_value
                    elif table_field == 'product':
                        db_ilog.product = new_value
                    elif table_field == 'component':
                        db_ilog.component = new_value
                    elif table_field == 'version':
                        db_ilog.version = new_value
                    elif table_field == 'rep_platform':
                        db_ilog.rep_platform = new_value
                    elif table_field == 'op_sys':
                        db_ilog.op_sys = new_value
                    elif table_field == 'bug_file_loc':
                        db_ilog.bug_file_loc = new_value
                    elif table_field == 'status_whiteboard':
                        db_ilog.status_whiteboard = new_value
                    elif table_field == 'target_milestone':
                        db_ilog.target_milestone = new_value
                    elif table_field == 'votes':
                        db_ilog.votes = new_value
                    elif table_field == 'everconfirmed':
                        db_ilog.everconfirmed = new_value
                    elif table_field == 'qa_contact':
                        db_ilog.qa_contact = new_value
                    elif table_field == 'keywords':
                        db_ilog.keywords = new_value
                    elif table_field == 'cc':
                        db_ilog.cc = new_value

                    try:
                        self.store.add(db_ilog)
                    except Exception:
                        traceback.print_exc()

            elif self.backend_is_jira():
                # Jira section
                #
                #

                if (field in jira_issues_links):
                    table_field = jira_issues_links[field]
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    if table_field == 'summary':
                        db_ilog.summary = new_value
                    elif table_field == 'priority':
                        db_ilog.priority = new_value
                    elif table_field == 'type':
                        db_ilog.type = new_value
                    elif table_field == 'assigned_to':
                        db_ilog.assigned_to = self.get_people_id(
                            new_value, self.get_tracker_id(db_ilog.issue_id))
                    elif table_field == 'status':
                        db_ilog.status = new_value
                    elif table_field == 'resolution':
                        db_ilog.resolution = new_value
                    elif table_field == 'description':
                        db_ilog.description = new_value
                    elif table_field == 'link':
                        db_ilog.link = new_value
                    elif table_field == 'component':
                        db_ilog.component = new_value
                    elif table_field == 'version':
                        db_ilog.version = new_value
                    elif table_field == 'security':
                        db_ilog.security = new_value
                    try:
                        self.store.add(db_ilog)
                    except Exception:
                        traceback.print_exc()

        # If there were changes, log the bugs created after the last change;
        # if there were none, insert all of the created bugs.
        self.insert_new_bugs_created(date_from, None)
        self.store.commit()
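
IssuesLog.run() is essentially an event-sourcing replay: it loads the last
snapshot logged for an issue, applies each change row in chronological order,
and appends one new snapshot per change. A toy version of that replay using
plain dicts (field names are illustrative, not from the original project):

changes = [
    {'field': 'status', 'new_value': 'ASSIGNED'},
    {'field': 'resolution', 'new_value': 'FIXED'},
]

snapshot = {'status': 'NEW', 'resolution': None}  # state from the log table
log = []
for ch in changes:
    snapshot = dict(snapshot)                # copy, as get_previous_state does
    snapshot[ch['field']] = ch['new_value']  # apply the change
    log.append(snapshot)                     # one log row per change

print(log[-1])  # -> {'status': 'ASSIGNED', 'resolution': 'FIXED'}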
Code Example #36
File: base_updater.py Project: keichacon/GlobaLeaks
class TableReplacer(object):
    """
    This is the base class used by every Updater
    """
    def __init__(self, old_db_file, new_db_file, start_ver):
        from globaleaks.db.update_8_9 import Context_v_8, Receiver_v_8, Notification_v_8
        from globaleaks.db.update_9_10 import Node_v_9, Receiver_v_9, User_v_9
        from globaleaks.db.update_10_11 import InternalTip_v_10, InternalFile_v_10
        from globaleaks.db.update_11_12 import Node_v_11, Context_v_11
        from globaleaks.db.update_12_13 import Node_v_12, Context_v_12
        from globaleaks.db.update_13_14 import Node_v_13, Context_v_13
        from globaleaks.db.update_14_15 import Node_v_14, User_v_14, Context_v_14, Receiver_v_14, \
            InternalTip_v_14, Notification_v_14, Stats_v_14, Comment_v_14
        from globaleaks.db.update_15_16 import Receiver_v_15, Notification_v_15
        from globaleaks.db.update_16_17 import Node_v_16, Receiver_v_16, Notification_v_16, Stats_v_16
        from globaleaks.db.update_17_18 import Node_v_17
        from globaleaks.db.update_18_19 import Node_v_18

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        self.table_history = {
            'Node': [
                Node_v_9, None, Node_v_11, None, Node_v_12, Node_v_13,
                Node_v_14, Node_v_16, None, Node_v_17, Node_v_18, models.Node
            ],
            'User': [
                User_v_9, None, User_v_14, None, None, None, None, models.User,
                None, None, None, None
            ],
            'Context': [
                Context_v_8, Context_v_11, None, None, Context_v_12,
                Context_v_13, Context_v_14, models.Context, None, None, None,
                None
            ],
            'Receiver': [
                Receiver_v_8, Receiver_v_9, Receiver_v_14, None, None, None,
                None, Receiver_v_15, Receiver_v_16, models.Receiver, None, None
            ],
            'ReceiverFile': [
                models.ReceiverFile, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'Notification': [
                Notification_v_8, Notification_v_14, None, None, None, None,
                None, Notification_v_15, Notification_v_16,
                models.Notification, None, None
            ],
            'Comment': [
                Comment_v_14, None, None, None, None, None, None,
                models.Comment, None, None, None, None
            ],
            'InternalTip': [
                InternalTip_v_10, None, None, InternalTip_v_14, None, None,
                None, models.InternalTip, None, None, None, None
            ],
            'InternalFile': [
                InternalFile_v_10, None, None, models.InternalFile, None, None,
                None, None, None, None, None, None
            ],
            'WhistleblowerTip': [
                models.WhistleblowerTip, None, None, None, None, None, None,
                None, None, None, None, None
            ],
            'ReceiverTip': [
                models.ReceiverTip, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'ReceiverInternalTip': [
                models.ReceiverInternalTip, None, None, None, None, None, None,
                None, None, None, None, None
            ],
            'ReceiverContext': [
                models.ReceiverContext, None, None, None, None, None, None,
                None, None, None, None, None
            ],
            'Message': [
                models.Message, None, None, None, None, None, None, None, None,
                None, None, None
            ],
            'Stats': [
                Stats_v_14, None, None, None, None, None, None, Stats_v_16,
                None, models.Stats, None, None
            ],
            'ApplicationData': [
                models.ApplicationData, None, None, None, None, None, None,
                None, None, None, None, None
            ],
            'Field': [
                models.Field, None, None, None, None, None, None, None, None,
                None, None, None
            ],
            'FieldOption': [
                models.FieldOption, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'FieldField': [
                models.FieldField, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'Step': [
                models.Step, None, None, None, None, None, None, None, None,
                None, None, None
            ],
            'StepField': [
                models.StepField, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'Anomalies': [
                models.Anomalies, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'EventLogs': [
                models.EventLogs, None, None, None, None, None, None, None,
                None, None, None, None
            ],
        }

        for k, v in self.table_history.iteritems():
            # +1 because counting starts from 0,
            # -8 because releases before the 8th are not supported anymore
            length = DATABASE_VERSION + 1 - 8
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(
                    length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            log.msg('{} Acquire SQL schema {}'.format(
                self.debug_info, GLSetting.db_schema_file))

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSetting.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        log.msg(
                            'OperationalError in "{}"'.format(create_query))
            self.store_new.commit()
            return
            # return here; the intermediate migrating versions are handled below:

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(
                    self.debug_info, create_query))
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        pass

    def epilogue(self):
        pass

    def get_right_model(self, table_name, version):

        table_index = (version - 8)

        if table_name not in self.table_history:
            msg = 'Not implemented usage of get_right_model {} ({} {})'.format(
                __file__, table_name, self.start_ver)
            raise NotImplementedError(msg)

        if version > DATABASE_VERSION:
            raise ValueError(
                'Version supplied must be less or equal to {}'.format(
                    DATABASE_VERSION))

        if self.table_history[table_name][table_index]:
            return self.table_history[table_name][table_index]

        # otherwise it is None, and we have to walk back to the previous valid version
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # This should never happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name:
        @param version:
        @return:
            The SQL right for the stuff we've
        """

        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):
        models_count = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)).count()
        log.msg('{} default {} migration assistant: #{}'.format(
            self.debug_info, table_name, models_count))

        old_objects = self.store_old.find(
            self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # walk Storm's column map and copy every column value across
            for _, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        log.msg('{} default {} migration assistant'.format(
            self.debug_info, table_name))

        old_obj = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # walk Storm's column map and copy every column value across
        for _, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        has been created between 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        has been created between 14 and 15
        and is not migrated since 17
        """
        if self.start_ver < 17:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        There is no need to migrate the application data.
        Default application data is loaded by the application
        and stored in the db at each new start.
        """
        return

    def migrate_Field(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Field")

    def migrate_FieldOption(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldOption")

    def migrate_FieldField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldField")

    def migrate_Step(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Step")

    def migrate_StepField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("StepField")

    def migrate_Anomalies(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Anomalies")

    def migrate_EventLogs(self):
        """
        has been created between 15 and 16!
        """
        if self.start_ver < 16:
            return

        self._perform_copy_list("EventLogs")
Code Example #37
class StormManager(Singleton):
    log = logging.getLogger('{}.StormManager'.format(__name__))

    def __init__(self):
        pass

    @loggingInfo
    def init(self, *args):
        self.dbOK = False
        self.openDB()

    @loggingInfo
    def reset(self):
        self.closeDB()
        self.openDB()

    @loggingInfo
    def openDB(self):
        try:
            self._config = ConfigManager()
            self.db = self._config.config[self._config.database]["database"]
            create_db = False
            if self.db == self._config.Sqlite:
                folder = self._config.config[self._config.database]["folder"]
                loc = folder + '/icepapcms.db'
                print("Using Sqlite database at %s" % loc)
                create_db = not os.path.exists(loc)
                if create_db:
                    print("No database file found, creating it")
                    if not os.path.exists(folder):
                        os.mkdir(folder)
                self._database = create_database("%s:%s" % (self.db, loc))
            else:
                server = self._config.config[self._config.database]["server"]
                user = self._config.config[self._config.database]["user"]
                pwd = self._config.config[self._config.database]["password"]
                scheme = "{}://{}:{}@{}/icepapcms".format(
                    self.db, user, pwd, server)

                if self.db == 'mysql':
                    self._database = MySQL(scheme)
                else:
                    self._database = create_database(scheme)

            self._store = Store(self._database)
            if create_db:
                self.dbOK = self.createSqliteDB()
            else:
                self.dbOK = True
        except Exception as e:
            self.log.error("Unexpected error on openDB: %s", e)
            self.dbOK = False

    @loggingInfo
    def createSqliteDB(self):
        try:
            sql_file = resource_filename('icepapcms.db', 'creates_sqlite.sql')
            with open(sql_file) as f:
                sql_script = f.read()
            statements = re.compile(r";[ \t]*$", re.M)

            for statement in statements.split(sql_script):
                # Remove any '--' line comments from the statement
                statement = re.sub(r"--.*", "", statement)
                if statement.strip():
                    create = statement + ";"
                    self._store.execute(create)
            self._store.commit()
            return True
        except Exception as e:
            self.log.error("Unexpected error on createSqliteDB: %s", e)
            return False

    @loggingInfo
    def closeDB(self):
        try:
            if self.dbOK:
                self._store.close()
            return True
        except Exception as e:
            self.log.error("Unexpected error on closeDB:", e)
            self.dbOK = False
            return False

    @loggingInfo
    def store(self, obj):
        self._store.add(obj)

    @loggingInfo
    def remove(self, obj):
        self._store.remove(obj)

    @loggingInfo
    def addIcepapSystem(self, icepap_system):
        try:
            self._store.add(icepap_system)
            self.commitTransaction()
            return True
        except Exception as e:
            self.log.error(
                "some exception trying to store the icepap system "
                "%s: %s", icepap_system, e)
            return False

    @loggingInfo
    def deleteLocation(self, location):
        if self.db == self._config.Sqlite:
            for system in location.systems:
                self.deleteIcepapSystem(system)
        self._store.remove(location)
        self.commitTransaction()

    @loggingInfo
    def deleteIcepapSystem(self, icepap_system):
        if self.db == self._config.Sqlite:
            for driver in icepap_system.drivers:
                self.deleteDriver(driver)
        self._store.remove(icepap_system)
        self.commitTransaction()

    @loggingInfo
    def deleteDriver(self, driver):

        for cfg in driver.historic_cfgs:
            for par in cfg.parameters:
                self._store.remove(par)
            self._store.remove(cfg)
        self._store.remove(driver)
        self.commitTransaction()

    @loggingInfo
    def getAllLocations(self):
        try:
            locations = self._store.find(Location)
            location_dict = {}
            for location in locations:
                location_dict[location.name] = location
            return location_dict
        except Exception as e:
            self.log.error("Unexpected error on getAllLocations: %s", e)
            return {}

    @loggingInfo
    def getLocation(self, name):
        return self._store.get(Location, name)

    @loggingInfo
    def getIcepapSystem(self, icepap_name):
        return self._store.get(IcepapSystem, icepap_name)

    @loggingInfo
    def existsDriver(self, mydriver, id):

        drivers = self._store.find(
            IcepapDriver, IcepapDriver.addr == IcepapDriverCfg.driver_addr,
            IcepapDriverCfg.id == CfgParameter.cfg_id,
            CfgParameter.name == str("ID"), CfgParameter.value == id)
        if drivers:
            for driver in drivers:
                if driver.addr != mydriver.addr:
                    return driver
            return None
        else:
            return None

    @loggingInfo
    def getLocationIcepapSystem(self, location):
        try:
            icepaps = self._store.find(IcepapSystem,
                                       IcepapSystem.location_name == location)
            icepaps.order_by(IcepapSystem.name)
            ipapdict = {}
            for ipap_sys in icepaps:
                ipapdict[ipap_sys.name] = ipap_sys
            return ipapdict
        except Exception as e:
            self.log.error(
                "Unexpected error on getLocationIcepapSystem: "
                "%s", e)
            return {}

    @loggingInfo
    def rollback(self):
        self._store.rollback()

    @loggingInfo
    def commitTransaction(self):
        try:
            self._store.commit()
            return True
        except Exception:
            return False
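
createSqliteDB() above splits the bootstrap script on end-of-line semicolons
and strips '--' comments before executing each statement. The splitting logic
can be exercised on its own, without a database (the script text below is
illustrative):

import re

sql_script = """
-- one table per laboratory location
CREATE TABLE location (name TEXT PRIMARY KEY);
CREATE TABLE icepap_system (name TEXT PRIMARY KEY);
"""

statements = re.compile(r";[ \t]*$", re.M)
for statement in statements.split(sql_script):
    statement = re.sub(r"--.*", "", statement)  # drop line comments
    if statement.strip():
        print(statement.strip() + ";")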
Code Example #38
File: easycsv.py Project: wilsonfreitas/easycsv
class StormORM(ORM):
    """
    Storm implementation of ORM super class.
    """
    def __init__(self, uri=None, store=None):
        '''
        @param uri: Database URI following storm rules.
        @param store: Storm store.

        If uri is given, a new store is instantiated and used
        to execute the statements.
        If both parameters are given, the newly created store
        overrides the store passed in.
        '''
        from storm.locals import create_database, Store
        self.uri = uri
        self.store = store
        if self.uri:
            database = create_database(self.uri)
            self.store = Store(database)
        if not self.store:
            raise Exception('None storm store')
        self.attrParser = StormAttributeParser()
            
    def _getObject(self, csvType, csvStatement):
        """
        Retrieves the object to be used at statement execution.
        
        @param csvType: The CSVType
        @param csvStatement: The CSVStatement
        
        @return: The object early instanciated (for insert statement) or
        retrieved from database (for update or delete statements).
        """
        typo = csvType.type
        keys = csvType.keys
        attributes = csvStatement.attributes
        if csvStatement.action in [DELETE, UPDATE]:
            if csvType.hasPrimaryKey:
                return self.store.get(typo, attributes[ csvType.primaryKey[0] ])
            else:
                pred = And([Eq(typo, key, attributes[i]) for i,key in keys.iteritems()])
                result = self.store.find(typo, pred)
                if result.count() == 0:
                    return None
                elif result.count() == 1:
                    return result.one()
                else:
                    return [r for r in result]
        elif csvStatement.action is INSERT:
            return typo()
    
    def executeStatement(self, csvType, csvStatement):
        """
        Executes csv statements matched by the pair csvType, csvStatement.
        
        @param csvType: The CSVType
        @param csvStatement: The CSVStatement
        
        @return: Total statements executed or raises a ValueError if the object retrieved with
        the pair csvType, csvStatement is None.
        """
        obj = self._getObject(csvType, csvStatement)
        
        if not obj:
            msg = 'Statement returned None at line %d: %s' % (csvStatement.lineNumber, csvStatement.lineContent)
            raise ValueError(msg)
            
        objs = []
        
        if type(obj) is list:
            objs += obj
        else:
            objs.append(obj)
            
        i = 0
        for _obj in objs:
            self._executeStatement(_obj, csvType, csvStatement)
            i += 1
            
        return i
    
    def _executeStatement(self, obj, csvType, csvStatement):
        """
        Executes a single csv statement
        
        @param csvType: The CSVType
        @param csvStatement: The CSVStatement
        """
        keys = csvType.keys
        attributes = csvType.attributes
        values = csvStatement.attributes
        if csvStatement.action is INSERT:
            pairs = [(key, values[i]) for i,key in keys.iteritems()]
            pairs += [(key, values[i]) for i,key in attributes.iteritems()]
            for key, value in pairs:
                setattr(obj, key, value)
            self.store.add(obj)
        elif csvStatement.action is UPDATE:
            pairs = [(key, values[i]) for i,key in attributes.iteritems()]
            for key, value in pairs:
                setattr(obj, key, value)
        elif csvStatement.action is DELETE:
            self.store.remove(obj)
        self.store.commit()
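
A note on the predicate helpers used in _getObject above: the three-argument Eq and the list-taking And do not match the signatures of storm.expr's Eq/And, so they are presumably project-local builders. A minimal sketch of how they might be defined, with signatures inferred from the call sites (an assumption, not this project's actual code):

from storm.expr import And as StormAnd

def Eq(cls, attr_name, value):
    # build a Storm comparison expression (e.g. Person.name == u'Alice')
    # from a model class, an attribute name and a literal value
    return getattr(cls, attr_name) == value

def And(predicates):
    # storm.expr.And takes expressions as positional arguments,
    # so unpack the list built by the caller
    return StormAnd(*predicates)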
コード例 #39
0
ファイル: test_migration.py プロジェクト: Taipo/GlobaLeaks
class TestMigrationRegression(unittest.TestCase):
    def _initStartDB(self, target_ver):
        helpers.init_glsettings_for_unit_tests()

        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % target_ver
        db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'db', 'populated', db_name)
        shutil.copyfile(db_path, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        self.store = Store(create_database(GLSettings.db_uri))

    def test_check_field_constraints(self):
        # This test case asserts that a migration from db ver 32 up to 34 with
        # fields that fail the constraints still functions.
        self._initStartDB(32)

        field_dict = helpers.get_dummy_field()
        field_dict['instance'] = 'reference'
        field_dict['step_id'] = None
        field_dict['field_id'] = None

        db_create_field(self.store, field_dict, u'en')

        field_dict = helpers.get_dummy_field()
        field_dict['instance'] = 'instance'

        db_create_field(self.store, field_dict, u'en')

        field_dict = helpers.get_dummy_field()
        field_dict['instance'] = 'template'
        field_dict['step_id'] = None
        # "is not None" would be evaluated by Python before Storm sees it;
        # use != None so Storm emits an IS NOT NULL comparison
        fld_grp_id = self.store.find(Field, Field.fieldgroup_id != None)[0].fieldgroup_id
        field_dict['field_id'] = fld_grp_id

        db_create_field(self.store, field_dict, u'en')
        self.store.commit()

        ret = perform_system_update()
        shutil.rmtree(GLSettings.db_path)
        self.assertNotEqual(ret, -1)

    def test_check_unmodifiable_strings(self):
        # This test case asserts that data migration updates unmodifiable l10n strings
        self._initStartDB(34)

        notification_l10n = NotificationL10NFactory(self.store)

        t0 = notification_l10n.get_val('export_template', 'ar')

        notification_l10n.set_val('export_template', 'ar', '')

        t1 = notification_l10n.get_val('export_template', 'ar')

        self.assertEqual(t1, '')

        self.store.commit()

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val('version', self.dummy_ver)
        self.assertEqual(prv.get_val('version'), self.dummy_ver)
        store.commit()
        store.close()

        migration.perform_data_update(self.db_file)

        # reopen the db and verify the migration restored the string
        store = Store(create_database(GLSettings.db_uri))
        notification_l10n = NotificationL10NFactory(store)
        t2 = notification_l10n.get_val('export_template', 'ar')
        self.assertEqual(t2, t0)
        store.commit()
        store.close()

        shutil.rmtree(GLSettings.db_path)
コード例 #40
0
ファイル: IssuesLog.py プロジェクト: AlertProject/Bicho
class IssuesLog:
    def __init__(self, backend_name):
        self.backend_name = backend_name
        self.connect()
        self.create_db()

    def connect(self):
        opts = Config()

        self.database = create_database(
            "mysql://"
            + opts.db_user_out
            + ":"
            + opts.db_password_out
            + "@"
            + opts.db_hostname_out
            + ":"
            + opts.db_port_out
            + "/"
            + opts.db_database_out
        )
        self.store = Store(self.database)

    def create_db(self):
        self.store.execute(__sql_table__)

    def copy_issue(self, db_ilog):
        """
        This method creates a copy of a DBIssuesLog object
        """
        aux = DBIssuesLog(db_ilog.issue, db_ilog.tracker_id)
        aux.issue_id = db_ilog.issue_id
        aux.type = db_ilog.type
        aux.summary = db_ilog.summary
        aux.description = db_ilog.description
        aux.status = db_ilog.status
        aux.resolution = db_ilog.resolution
        aux.priority = db_ilog.priority
        aux.submitted_by = db_ilog.submitted_by
        aux.date = db_ilog.date
        aux.assigned_to = db_ilog.assigned_to
        return aux

    def build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in
        order to get the initial state of the bug
        """
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes where issue_id=%s" % (db_ilog.issue_id))

        for f in fields:
            value = self.store.execute(
                'SELECT old_value FROM changes WHERE issue_id=%s AND field="%s" ORDER BY changed_on LIMIT 1'
                % (db_ilog.issue_id, f[0])
            )
            for v in value:
                # Bugzilla section
                #
                if f[0] in bg_issues_links:
                    table_field = bg_issues_links[f[0]]
                    if table_field == "summary":
                        db_ilog.summary = v[0]
                    elif table_field == "priority":
                        db_ilog.priority = v[0]
                    elif table_field == "assigned_to":
                        db_ilog.assigned_to = v[0]
                    elif table_field == "status":
                        db_ilog.status = v[0]
                    elif table_field == "resolution":
                        db_ilog.resolution = v[0]
        return db_ilog

    def run(self):
        issues = self.store.find(DBIssue)
        for i in issues:
            db_ilog = DBIssuesLog(i.issue, i.tracker_id)
            db_ilog.issue_id = i.id
            db_ilog.type = i.type
            db_ilog.summary = i.summary
            db_ilog.description = i.description
            db_ilog.status = i.status
            db_ilog.resolution = i.resolution
            db_ilog.priority = i.priority
            db_ilog.submitted_by = i.submitted_by
            db_ilog.date = i.submitted_on
            db_ilog.assigned_to = i.assigned_to

            db_ilog = self.build_initial_state(db_ilog)

            self.store.add(db_ilog)

            # the code below gets all the changes and insert a row per change
            changes = self.store.execute(
                "SELECT field, new_value, changed_by, changed_on FROM changes where issue_id=%s" % (db_ilog.issue_id)
            )

            for ch in changes:
                field = ch[0]
                new_value = ch[1]
                changed_by = ch[2]
                date = ch[3]

                db_ilog = self.copy_issue(db_ilog)

                # Bugzilla section
                #
                if field in bg_issues_links:
                    table_field = bg_issues_links[field]
                    if table_field == "summary":
                        db_ilog.summary = new_value
                    elif table_field == "priority":
                        db_ilog.priority = new_value
                    elif table_field == "assignted_to":
                        db_ilog.assigned_to = new_value
                    elif table_field == "status":
                        db_ilog.status = new_value
                    elif table_field == "resolution":
                        db_ilog.resolution = new_value
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    try:
                        self.store.add(db_ilog)
                    except Exception:
                        traceback.print_exc()

            self.store.commit()
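
Both build_initial_state and run above interpolate values directly into SQL strings. Storm's Store.execute also accepts a parameter sequence with "?" placeholders, which sidesteps quoting problems; a sketch of build_initial_state rewritten that way (the setattr call is a compact stand-in for the if/elif chain over the mapped fields):

    def build_initial_state(self, db_ilog):
        fields = self.store.execute(
            "SELECT DISTINCT(field) FROM changes WHERE issue_id = ?",
            (db_ilog.issue_id,))

        for f in fields:
            value = self.store.execute(
                "SELECT old_value FROM changes"
                " WHERE issue_id = ? AND field = ?"
                " ORDER BY changed_on LIMIT 1",
                (db_ilog.issue_id, f[0]))
            for v in value:
                # Bugzilla section: assign the mapped attribute directly
                if f[0] in bg_issues_links:
                    setattr(db_ilog, bg_issues_links[f[0]], v[0])
        return db_ilog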
コード例 #41
0
class TestMigrationRegression(unittest.TestCase):
    def _initStartDB(self, target_ver):
        helpers.init_glsettings_for_unit_tests()

        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % target_ver
        db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                               'db', 'populated', db_name)
        shutil.copyfile(db_path, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        self.store = Store(create_database(GLSettings.db_uri))

    def test_check_field_constraints(self):
        # This test case asserts that a migration from db ver 32 up to 34 with
        # fields that fail the constraints still functions.
        self._initStartDB(32)

        field_dict = helpers.get_dummy_field()
        field_dict['instance'] = 'reference'
        field_dict['step_id'] = None
        field_dict['field_id'] = None

        db_create_field(self.store, field_dict, u'en')

        field_dict = helpers.get_dummy_field()
        field_dict['instance'] = 'instance'

        db_create_field(self.store, field_dict, u'en')

        field_dict = helpers.get_dummy_field()
        field_dict['instance'] = 'template'
        field_dict['step_id'] = None
        # "is not None" would be evaluated by Python before Storm sees it;
        # use != None so Storm emits an IS NOT NULL comparison
        fld_grp_id = self.store.find(Field,
                                     Field.fieldgroup_id != None)[0].fieldgroup_id
        field_dict['field_id'] = fld_grp_id

        db_create_field(self.store, field_dict, u'en')
        self.store.commit()

        ret = perform_system_update()
        shutil.rmtree(GLSettings.db_path)
        self.assertNotEqual(ret, -1)

    def test_check_unmodifiable_strings(self):
        # This test case asserts that data migration updates unmodifiable l10n strings
        self._initStartDB(34)

        notification_l10n = NotificationL10NFactory(self.store)

        t0 = notification_l10n.get_val('export_template', 'ar')

        notification_l10n.set_val('export_template', 'ar', '')

        t1 = notification_l10n.get_val('export_template', 'ar')

        self.assertEqual(t1, '')

        self.store.commit()

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val('version', self.dummy_ver)
        self.assertEqual(prv.get_val('version'), self.dummy_ver)
        store.commit()
        store.close()

        migration.perform_data_update(self.db_file)

        # reopen the db and verify the migration restored the string
        store = Store(create_database(GLSettings.db_uri))
        notification_l10n = NotificationL10NFactory(store)
        t2 = notification_l10n.get_val('export_template', 'ar')
        self.assertEqual(t2, t0)
        store.commit()
        store.close()

        shutil.rmtree(GLSettings.db_path)
コード例 #42
0
ファイル: testing.py プロジェクト: DamnWidget/mamba-storm
class ZStormResourceManagerTest(TestHelper):

    def is_supported(self):
        return has_transaction and has_zope_component and has_testresources

    def setUp(self):
        super(ZStormResourceManagerTest, self).setUp()
        package_dir = self.makeDir()
        sys.path.append(package_dir)
        self.patch_dir = os.path.join(package_dir, "patch_package")
        os.mkdir(self.patch_dir)
        self.makeFile(path=os.path.join(self.patch_dir, "__init__.py"),
                      content="")
        self.makeFile(path=os.path.join(self.patch_dir, "patch_1.py"),
                      content=PATCH)
        import patch_package
        create = ["CREATE TABLE test (foo TEXT UNIQUE, bar INT)"]
        drop = ["DROP TABLE test"]
        delete = ["DELETE FROM test"]
        uri = "sqlite:///%s" % self.makeFile()
        schema = ZSchema(create, drop, delete, patch_package)
        self.databases = [{"name": "test", "uri": uri, "schema": schema}]
        self.resource = ZStormResourceManager(self.databases)
        self.store = Store(create_database(uri))

    def tearDown(self):
        global_zstorm._reset()
        del sys.modules["patch_package"]
        if "patch_1" in sys.modules:
            del sys.modules["patch_1"]
        super(ZStormResourceManagerTest, self).tearDown()

    def test_make(self):
        """
        L{ZStormResourceManager.make} returns a L{ZStorm} resource that can be
        used to get the registered L{Store}s.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo, bar FROM test")))

    def test_make_lazy(self):
        """
        L{ZStormResourceManager.make} does not create all stores upfront, but
        only when they're actually used, likewise L{ZStorm.get}.
        """
        zstorm = self.resource.make([])
        self.assertEqual([], list(zstorm.iterstores()))
        store = zstorm.get("test")
        self.assertEqual([("test", store)], list(zstorm.iterstores()))

    def test_make_upgrade(self):
        """
        L{ZStormResourceManager.make} upgrades the schema if needed.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT bar FROM test")))

    def test_make_upgrade_unknown_patch(self):
        """
        L{ZStormResourceManager.make} resets the schema if an unknown patch
        is found
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("INSERT INTO patch VALUES (2)")
        self.store.execute("CREATE TABLE test (foo TEXT, egg BOOL)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo, bar FROM test")))
        self.assertEqual([(1,)],
                         list(store.execute("SELECT version FROM patch")))

    def test_make_delete(self):
        """
        L{ZStormResourceManager.make} deletes the data from all tables to make
        sure that tests run against a clean database.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.execute("INSERT INTO test (foo) VALUES ('data')")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo FROM test")))

    def test_make_commits_transaction_once(self):
        """
        L{ZStormResourceManager.make} commits schema changes only once
        across all stores, after all patch and delete statements have
        been executed.
        """
        database2 = {"name": "test2",
                     "uri": "sqlite:///%s" % self.makeFile(),
                     "schema": self.databases[0]["schema"]}
        self.databases.append(database2)
        other_store = Store(create_database(database2["uri"]))
        for store in [self.store, other_store]:
            store.execute("CREATE TABLE patch "
                          "(version INTEGER NOT NULL PRIMARY KEY)")
            store.execute("CREATE TABLE test (foo TEXT)")
            store.execute("INSERT INTO test (foo) VALUES ('data')")
            store.commit()

        with CaptureTracer() as tracer:
            zstorm = self.resource.make([])

        self.assertEqual(["COMMIT", "COMMIT"], tracer.queries[-2:])
        store1 = zstorm.get("test")
        store2 = zstorm.get("test2")
        self.assertEqual([], list(store1.execute("SELECT foo FROM test")))
        self.assertEqual([], list(store2.execute("SELECT foo FROM test")))

    def test_make_zstorm_overwritten(self):
        """
        L{ZStormResourceManager.make} registers its own ZStorm again if a test
        has registered a new ZStorm utility overwriting the resource one.
        """
        zstorm = self.resource.make([])
        provideUtility(ZStorm())
        self.resource.make([])
        self.assertIs(zstorm, getUtility(IZStorm))

    def test_clean_flush(self):
        """
        L{ZStormResourceManager.clean} tries to flush the stores to make sure
        that they are all in a consistent state.
        """
        class Test(object):
            __storm_table__ = "test"
            foo = Unicode()
            bar = Int(primary=True)

            def __init__(self, foo, bar):
                self.foo = foo
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(u"data", 1))
        store.add(Test(u"data", 2))
        self.assertRaises(IntegrityError, self.resource.clean, zstorm)

    def test_clean_delete(self):
        """
        L{ZStormResourceManager.clean} cleans the database tables from the data
        created by the tests.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))

    def test_clean_with_force_delete(self):
        """
        If L{ZStormResourceManager.force_delete} is C{True}, L{Schema.delete}
        is always invoked upon test cleanup.
        """
        zstorm = self.resource.make([])
        zstorm.get("test")  # Force the creation of the store
        self.store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        self.store.commit()
        self.resource.force_delete = True
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))

    def test_wb_clean_clears_alive_cache_before_abort(self):
        """
        L{ZStormResourceManager.clean} clears the alive cache before
        aborting the transaction.
        """
        class Test(object):
            __storm_table__ = "test"
            bar = Int(primary=True)

            def __init__(self, bar):
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(1))
        store.add(Test(2))
        real_invalidate = store.invalidate

        def invalidate_proxy():
            self.assertEqual(0, len(store._alive.values()))
            real_invalidate()
        store.invalidate = invalidate_proxy

        self.resource.clean(zstorm)

    def test_schema_uri(self):
        """
        It's possible to specify an alternate URI for applying the schema
        and cleaning up tables after a test.
        """
        schema_uri = "sqlite:///%s" % self.makeFile()
        self.databases[0]["schema-uri"] = schema_uri
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        schema_store = Store(create_database(schema_uri))

        # The schema was applied using the alternate schema URI
        statement = "SELECT name FROM sqlite_master WHERE name='patch'"
        self.assertEqual([], list(store.execute(statement)))
        self.assertEqual([("patch",)], list(schema_store.execute(statement)))

        # The cleanup is performed with the alternate schema URI
        store.commit()
        schema_store.execute("INSERT INTO test (foo) VALUES ('data')")
        schema_store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(schema_store.execute("SELECT * FROM test")))

    def test_schema_uri_with_schema_stamp_dir(self):
        """
        If a schema stamp directory is set, and the stamp indicates there's no
        need to update the schema, the resource clean up code will still
        connect as schema user if it needs to run the schema delete statements
        because of a commit.
        """
        self.resource.schema_stamp_dir = self.makeFile()
        self.databases[0]["schema-uri"] = self.databases[0]["uri"]
        self.resource.make([])

        # Simulate a second test run that initializes the zstorm resource
        # from scratch, using the same schema stamp directory
        resource2 = ZStormResourceManager(self.databases)
        resource2.schema_stamp_dir = self.resource.schema_stamp_dir
        zstorm = resource2.make([])
        store = zstorm.get("test")
        store.execute("INSERT INTO test (foo) VALUES ('data')")
        store.commit()  # Committing will force a schema.delete() run
        resource2.clean(zstorm)
        self.assertEqual([], list(store.execute("SELECT * FROM test")))

    def test_no_schema(self):
        """
        A particular database may have no schema associated.
        """
        self.databases[0]["schema"] = None
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([],
                         list(store.execute("SELECT * FROM sqlite_master")))

    def test_no_schema_clean(self):
        """
        A particular database may have no schema associated. If it's committed
        during tests, it will just be skipped when cleaning up tables.
        """
        self.databases[0]["schema"] = None
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.commit()

        with CaptureTracer() as tracer:
            self.resource.clean(zstorm)

        self.assertEqual([], tracer.queries)

    def test_deprecated_database_format(self):
        """
        The old deprecated format of the 'database' constructor parameter is
        still supported.
        """
        import patch_package
        uri = "sqlite:///%s" % self.makeFile()
        schema = ZSchema([], [], [], patch_package)
        resource = ZStormResourceManager({"test": (uri, schema)})
        zstorm = resource.make([])
        store = zstorm.get("test")
        self.assertIsNot(None, store)

    def test_use_global_zstorm(self):
        """
        If the C{use_global_zstorm} attribute is C{True} then the global
        L{ZStorm} will be used.
        """
        self.resource.use_global_zstorm = True
        zstorm = self.resource.make([])
        self.assertIs(global_zstorm, zstorm)

    def test_provide_utility_before_patches(self):
        """
        The L{IZStorm} utility is provided before patches are applied, in order
        to let them get it if they need.
        """
        content = ("from zope.component import getUtility\n"
                   "from storm.zope.interfaces import IZStorm\n"
                   "def apply(store):\n"
                   "    getUtility(IZStorm)\n")
        self.makeFile(path=os.path.join(self.patch_dir, "patch_2.py"),
                      content=content)
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([(1,), (2,)],
                         sorted(store.execute("SELECT version FROM patch")))

    def test_create_schema_stamp_dir(self):
        """
        If a schema stamp directory is set, it's created automatically if it
        doesn't exist yet.
        """
        self.resource.schema_stamp_dir = self.makeFile()
        self.resource.make([])
        self.assertTrue(os.path.exists(self.resource.schema_stamp_dir))

    def test_use_schema_stamp(self):
        """
        If a schema stamp directory is set, then it's used to decide whether
        to upgrade the schema or not. In case the patch directory hasn't been
        changed since the last known upgrade, no schema upgrade is run.
        """
        self.resource.schema_stamp_dir = self.makeFile()

        self.resource.make([])

        # Simulate a second test run that initializes the zstorm resource
        # from scratch, using the same schema stamp directory
        resource2 = ZStormResourceManager(self.databases)
        resource2.schema_stamp_dir = self.resource.schema_stamp_dir

        with CaptureTracer() as tracer:
            resource2.make([])

        self.assertEqual([], tracer.queries)

    def test_use_schema_stamp_out_of_date(self):
        """
        If a schema stamp directory is set, then it's used to decide whether
        to upgrade the schema or not. In case the patch directory has changed
        a schema upgrade is run.
        """
        self.resource.schema_stamp_dir = self.makeFile()
        self.resource.make([])

        # Simulate a second test run that initializes the zstorm resource
        # from scratch, using the same schema stamp directory
        resource2 = ZStormResourceManager(self.databases)
        resource2.schema_stamp_dir = self.resource.schema_stamp_dir

        self.makeFile(path=os.path.join(self.patch_dir, "patch_2.py"),
                      content="def apply(store): pass")

        class FakeStat(object):
            st_mtime = os.stat(self.patch_dir).st_mtime + 1

        stat_mock = self.mocker.replace(os.stat)
        stat_mock(self.patch_dir)
        self.mocker.result(FakeStat())
        self.mocker.replay()

        resource2.make([])
        result = self.store.execute("SELECT version FROM patch")
        self.assertEqual([(1,), (2,)], sorted(result.get_all()))
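
The PATCH constant written to patch_1.py in setUp is defined elsewhere in this test module. Judging from test_make_upgrade, which expects the upgrade to add the bar column to a test table that only has foo, a plausible sketch of that patch module is (an assumption, not the module's actual contents):

# patch_1.py -- a schema patch only needs an apply(store) callable
def apply(store):
    # bring an old schema (test table without the bar column) up to date
    store.execute("ALTER TABLE test ADD COLUMN bar INT")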
コード例 #43
0
class StormStorageBackend(StorageBackend):
    """Storage back-end based on the Storm ORM framework."""

    def __init__(self):
        self.store = None

    def set_config(self, **kwargs):
        """Set the configuration of this back-end."""
        uri = kwargs['uri']
        database = create_database(uri)
        self.store = Store(database)
        self.logger = logging.getLogger('StormStorageBackend')
        handler = logging.StreamHandler()
        formatter = logging.Formatter(kwargs['log_format'])
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
        self.logger.setLevel(getattr(logging, kwargs['log_level']))

    def create_node(self, node, jid, node_config):
        """Create a PubSub node with the given configuration.

        Creates the Node, NodeConfig, Affiliation and Subscription model for
        the given node.
        """
        self.logger.debug('Creating node %s for jid %s with config %s' %
            (node, jid, node_config))
        new_node = Node(node)
        self.store.add(new_node)
        config = copy.deepcopy(DEFAULT_CONFIG)
        config.update(node_config)
        for key, value in config.items():
            new_node_config = NodeConfig(node, key, value)
            new_node_config.updated = datetime.utcnow()
            self.store.add(new_node_config)
        affiliation = Affiliation(node, jid, u'owner', datetime.utcnow())
        self.store.add(affiliation)
        subscription = Subscription(node, jid, jid, u'subscribed',
                datetime.utcnow())
        self.store.add(subscription)

    def create_channel(self, jid):
        """Create a channel for the given JID.

        Creates all the required PubSub nodes that constitute a channel, with
        the appropriate permissions.
        """
        self.logger.debug('Creating channel for %s' % jid)
        creation_date = unicode(datetime.utcnow().isoformat())
        self.create_node(u'/user/%s/posts' % jid, jid,
            {u'channelType': u'personal',
                u'creationDate': creation_date,
                u'defaultAffiliation': u'publisher',
                u'description': u'buddycloud channel for %s' % jid,
                u'title': jid})
        self.create_node(u'/user/%s/geo/current' % jid, jid,
            {u'creationDate': creation_date,
                u'description': u'Where %s is at now' % jid,
                u'title': u'%s Current Location' % jid})
        self.create_node(u'/user/%s/geo/next' % jid, jid,
            {u'creationDate': creation_date,
                u'description': u'Where %s intends to go' % jid,
                u'title': u'%s Next Location' % jid})
        self.create_node(u'/user/%s/geo/previous' % jid, jid,
            {u'creationDate': creation_date,
                u'description': u'Where %s has been before' % jid,
                u'title': u'%s Previous Location' % jid})
        self.create_node(u'/user/%s/status' % jid, jid,
            {u'creationDate': creation_date,
                u'description': u'M000D',
                u'title': u'%s status updates' % jid})
        self.create_node(u'/user/%s/subscriptions' % jid, jid,
            {u'creationDate': creation_date,
                u'description': u'Browse my interests',
                u'title': u'%s subscriptions' % jid})
        self.store.commit()

    def get_node(self, node):
        """Get the requested PubSub node."""
        self.logger.debug('Getting node %s' % node)
        the_node = self.store.get(Node, node)
        self.logger.debug('Returning node %s' % the_node)
        return the_node

    def get_nodes(self):
        """Get a list of all the available PubSub nodes."""
        self.logger.debug('Getting list of available nodes.')
        node_list = self.store.find(Node)
        self.logger.debug('Returning list of available node %s' % node_list)
        return node_list

    def add_item(self, node, item_id, item):
        """Add an item to the requested PubSub node."""
        new_item = Item(node, unicode(item_id), datetime.utcnow(), item)
        self.store.add(new_item)
        self.store.commit()

    def shutdown(self):
        """Shut down this storage module - flush, commit and close the
        store."""
        self.store.flush()
        self.store.commit()
        self.store.close()
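
A minimal usage sketch for this back-end; the uri, log_format and log_level values are illustrative, but set_config reads exactly these keyword arguments:

backend = StormStorageBackend()
backend.set_config(uri='sqlite:pubsub.db',
                   log_format='%(asctime)s %(levelname)s %(message)s',
                   log_level='DEBUG')
backend.create_channel(u'alice@example.org')
print backend.get_node(u'/user/alice@example.org/posts')
backend.shutdown()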
コード例 #44
0
class TableReplacer:
    """
    This is the base class used by every Updater
    """
    def __init__(self, old_db_file, new_db_file, start_ver):

        from globaleaks.db.update_5_6 import User_version_5, Comment_version_5, Node_version_5
        from globaleaks.db.update_6_7 import Node_version_6, Context_version_6
        from globaleaks.db.update_7_8 import Node_version_7, Notification_version_7, Context_version_7, \
            Receiver_version_7, InternalFile_version_7
        from globaleaks.db.update_8_9 import Context_version_8, Receiver_version_8, Notification_version_8
        from globaleaks.db.update_9_10 import Node_version_9, ApplicationData_version_10, \
            Receiver_version_9, User_version_9
        from globaleaks.db.update_10_11 import InternalTip_version_10, InternalFile_version_10
        from globaleaks.db.update_11_12 import Node_version_11, ApplicationData_version_11, Context_version_11

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        self.table_history = {
            'Node': [
                Node_version_5, Node_version_6, Node_version_7, Node_version_9,
                None, Node_version_11, None, models.Node
            ],
            'User': [
                User_version_5, User_version_9, None, None, None, models.User,
                None, None
            ],
            'Context': [
                Context_version_6, None, Context_version_7, Context_version_8,
                Context_version_11, None, None, models.Context
            ],
            'Receiver': [
                Receiver_version_7, None, None, Receiver_version_8,
                Receiver_version_9, models.Receiver, None, None
            ],
            'ReceiverFile':
            [models.ReceiverFile, None, None, None, None, None, None, None],
            'Notification': [
                Notification_version_7, None, None, Notification_version_8,
                models.Notification, None, None, None
            ],
            'Comment': [
                Comment_version_5, models.Comment, None, None, None, None,
                None, None
            ],
            'InternalTip': [
                InternalTip_version_10, None, None, None, None, None,
                models.InternalTip, None
            ],
            'InternalFile': [
                InternalFile_version_7, None, None, InternalFile_version_10,
                None, None, models.InternalFile, None
            ],
            'WhistleblowerTip': [
                models.WhistleblowerTip, None, None, None, None, None, None,
                None
            ],
            'ReceiverTip':
            [models.ReceiverTip, None, None, None, None, None, None, None],
            'ReceiverInternalTip': [
                models.ReceiverInternalTip, None, None, None, None, None, None,
                None
            ],
            'ReceiverContext':
            [models.ReceiverContext, None, None, None, None, None, None, None],
            'Message':
            [models.Message, None, None, None, None, None, None, None],
            'Stats': [models.Stats, None, None, None, None, None, None, None],
            'ApplicationData': [
                ApplicationData_version_10, None, None, None, None, None, None,
                models.ApplicationData
            ],
        }

        for k, v in self.table_history.iteritems():
            # +1 because the count starts from 0,
            # -5 because releases 0,1,2,3,4 are not supported anymore
            assert len(v) == (DATABASE_VERSION + 1 - 5), \
                "Expecting %s to have %d statuses" % (k, DATABASE_VERSION + 1 - 5)

        print "%s Opening old DB: %s" % (self.debug_info, old_db_file)
        old_database = create_database("sqlite:%s" % self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database("sqlite:%s" % new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:

            print "%s Acquire SQL schema %s" % (self.debug_info,
                                                GLSetting.db_schema_file)

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                print "Unable to access %s" % GLSetting.db_schema_file
                raise Exception("Unable to access db schema file")

            with open(GLSetting.db_schema_file) as f:
                create_queries = f.read().split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        print "OperationalError in [%s]" % create_query

            self.store_new.commit()
            # migrating to the latest version: the full schema file has
            # already been loaded, so there is nothing else to create here
            return

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in this version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                print "%s OperationalError in [%s]" % (self.debug_info,
                                                       create_query)
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        pass

    def epilogue(self):
        pass

    def get_right_model(self, table_name, version):

        table_index = (version - 5)

        if table_name not in self.table_history:
            print "Not implemented usage of get_right_model %s (%s %d)" % (
                __file__, table_name, self.start_ver)
            raise NotImplementedError

        assert version <= DATABASE_VERSION, "wrong developer brainsync"

        if self.table_history[table_name][table_index]:
            return self.table_history[table_name][table_index]

        # otherwise the entry is None: walk back to the most recent
        # version in which the table was actually (re)defined
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # this should never happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name:
        @param version:
        @return:
            The SQL right for the stuff we've
        """

        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):

        print "%s default %s migration assistant: #%d" % (
            self.debug_info, table_name,
            self.store_old.find(
                self.get_right_model(table_name, self.start_ver)).count())

        old_objects = self.store_old.find(
            self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # copy every Storm column from the old object to the new one
            for k, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        print "%s default %s migration assistant" % (self.debug_info,
                                                     table_name)

        old_obj = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # copy every Storm column from the old object to the new one
        for k, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        The Message table was created between versions 7 and 8.
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        The Stats table was created between versions 9 and 10.
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        The ApplicationData table was created between versions 9 and 10.
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("ApplicationData")
コード例 #45
0
class TestMigrationRegression(unittest.TestCase):
    def _initStartDB(self, target_ver):
        helpers.init_glsettings_for_unit_tests()

        GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
        shutil.rmtree(GLSettings.db_path, True)
        os.mkdir(GLSettings.db_path)
        db_name = 'glbackend-%d.db' % target_ver
        db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                               'db', 'populated', db_name)
        shutil.copyfile(db_path, os.path.join(GLSettings.db_path, db_name))

        self.db_file = os.path.join(GLSettings.db_path, db_name)
        GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

        self.store = Store(create_database(GLSettings.db_uri))

    def test_check_field_constraints(self):
        # This test case asserts that a migration from db ver 32 up to the latest
        # db with fields that fail the constraints still functions.
        self._initStartDB(32)

        field_dict = helpers.get_dummy_field()
        field_dict['instance'] = 'reference'
        field_dict['step_id'] = None
        field_dict['field_id'] = None

        db_create_field(self.store, field_dict, u'en')

        field_dict = helpers.get_dummy_field()
        field_dict['instance'] = 'instance'

        db_create_field(self.store, field_dict, u'en')

        field_dict = helpers.get_dummy_field()
        field_dict['instance'] = 'template'
        field_dict['step_id'] = None
        # "is not None" would be evaluated by Python before Storm sees it;
        # use != None so Storm emits an IS NOT NULL comparison
        fld_grp_id = self.store.find(Field,
                                     Field.fieldgroup_id != None)[0].fieldgroup_id
        field_dict['field_id'] = fld_grp_id

        db_create_field(self.store, field_dict, u'en')
        self.store.commit()

        ret = update_db()
        shutil.rmtree(GLSettings.db_path)
        self.assertNotEqual(ret, -1)

    def test_check_unmodifiable_strings(self):
        # This test case asserts that data migration updates unmodifiable l10n strings
        self._initStartDB(34)

        notification_l10n = NotificationL10NFactory(self.store)

        t0 = notification_l10n.get_val('export_template', 'it')

        notification_l10n.set_val('export_template', 'it', '')

        t1 = notification_l10n.get_val('export_template', 'it')

        self.assertEqual(t1, '')

        self.store.commit()

        # place a dummy version in the current db
        store = Store(create_database(GLSettings.db_uri))
        prv = config.PrivateFactory(store)
        self.dummy_ver = '2.XX.XX'
        prv.set_val('version', self.dummy_ver)
        self.assertEqual(prv.get_val('version'), self.dummy_ver)
        store.commit()
        store.close()

        migration.perform_data_update(self.db_file)

        store = Store(create_database(GLSettings.db_uri))
        notification_l10n = NotificationL10NFactory(store)
        t2 = notification_l10n.get_val('export_template', 'it')
        self.assertEqual(t2, t0)
        store.commit()
        store.close()

        shutil.rmtree(GLSettings.db_path)

    def test_mig_37_valid_tor_hs_key(self):
        self._initStartDB(36)

        from globaleaks.db.migrations import update_37
        t = update_37.TOR_DIR
        update_37.TOR_DIR = GLSettings.db_path

        pk_path = os.path.join(update_37.TOR_DIR, 'private_key')
        hn_path = os.path.join(update_37.TOR_DIR, 'hostname')

        shutil.copy(os.path.join(helpers.DATA_DIR, 'tor/private_key'), pk_path)
        shutil.copy(os.path.join(helpers.DATA_DIR, 'tor/hostname'), hn_path)

        ret = update_db()
        self.assertEqual(ret, None)

        new_uri = GLSettings.make_db_uri(
            os.path.join(GLSettings.db_path, GLSettings.db_file_name))
        store = Store(create_database(new_uri))
        hs = config.NodeFactory(store).get_val('onionservice')
        pk = config.PrivateFactory(store).get_val('tor_onion_key')

        self.assertEqual('lftx7dbyvlc5txtl.onion', hs)
        with open(os.path.join(helpers.DATA_DIR,
                               'tor/ephemeral_service_key')) as f:
            saved_key = f.read().strip()
        self.assertEqual(saved_key, pk)

        store.close()

        shutil.rmtree(GLSettings.db_path)
        update_37.TOR_DIR = t
コード例 #46
0
ファイル: issues_log.py プロジェクト: sferdi/lucenebug
class IssuesLog():
    def __init__(self):
        self._connect()
        # it is not incremental, so we first drop the table
        self._drop_db()
        self._create_db()

    def _connect(self):
        opts = Config()

        self.database = create_database('mysql://' + opts.db_user_out + ':' +
                                        opts.db_password_out + '@' +
                                        opts.db_hostname_out + ':' +
                                        opts.db_port_out + '/' +
                                        opts.db_database_out)
        self.store = Store(self.database)

    def _create_db(self):
        self.store.execute(self._get_sql_create())

    def _drop_db(self):
        self.store.execute(self._get_sql_drop())

    def _get_people_id(self, email):
        """
        Gets the id of a user
        """
        try:
            p = self.store.find(DBPeople, DBPeople.email == email).one()
            return p.id
        except (AttributeError, NotOneError):
            p = self.store.find(DBPeople, DBPeople.user_id == email).one()
            try:
                return p.id
            except AttributeError:
                # no person was found in People with the email above, so
                # we include it
                printdbg("Person not found. Inserted with email %s " % (email))
                dp = DBPeople(email)
                self.store.add(dp)
                self.store.commit()
                return dp.id

    def _get_sql_drop(self):
        """
        Abstract method returning the SQL statements that drop the log tables
        """
        raise NotImplementedError

    def _get_sql_create(self):
        """
        Abstract method returning the SQL statements that create the log tables
        """
        raise NotImplementedError

    def _get_tracker_id(self, issue_id):
        """
        Returns tracker id from issues
        """
        result = self.store.find(DBIssue.tracker_id,
                                 DBIssue.id == issue_id).one()
        return result

    def _copy_issue_ext(self, aux, db_ilog):
        """
        Abstract method for copying backend-specific fields into the new log entry
        """
        raise NotImplementedError

    # TODO: reuse _copy_standard_values
    def _copy_issue(self, db_ilog):
        """
        This method returns a copy of the DB*Log object
        """
        aux = self._get_dbissues_object(db_ilog.issue, db_ilog.tracker_id)
        aux.issue_id = db_ilog.issue_id
        aux.change_id = db_ilog.change_id
        aux.type = db_ilog.type
        aux.summary = db_ilog.summary
        aux.description = db_ilog.description
        aux.status = db_ilog.status
        aux.resolution = db_ilog.resolution
        aux.priority = db_ilog.priority
        aux.submitted_by = db_ilog.submitted_by
        aux.date = db_ilog.date
        aux.assigned_to = db_ilog.assigned_to
        aux = self._copy_issue_ext(aux, db_ilog)
        return aux

    def _assign_values(self, db_ilog, field, value):
        """
        Abstract method for assigning a field value to the backend-specific log entry
        """
        raise NotImplementedError

    def _build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in
        order to get the initial state of the bug
        """
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes " +
                                    "WHERE issue_id=%s" % (db_ilog.issue_id))

        for f in fields:
            values = self.store.execute(
                "SELECT old_value FROM changes WHERE issue_id=%s AND \
                field=\"%s\" ORDER BY changed_on LIMIT 1" %
                (db_ilog.issue_id, f[0]))
            for v in values:
                db_ilog = self._assign_values(db_ilog, f[0], v[0])
            # the initial state does not correspond to a real change
            db_ilog.change_id = 0
        return db_ilog

    def _get_dbissues_object(self, issue_name, tracker_id):
        """
        Abstract method returning a new backend-specific DB*Log object
        """
        raise NotImplementedError

    def _copy_standard_values(self, issue, issue_log):
        """
        Copy the standard values from the issue object to the issue_log object
        """
        issue_log.issue_id = issue.id
        issue_log.type = issue.type
        issue_log.summary = issue.summary
        issue_log.description = issue.description
        issue_log.status = issue.status
        issue_log.resolution = issue.resolution
        issue_log.priority = issue.priority
        issue_log.submitted_by = issue.submitted_by
        issue_log.date = issue.submitted_on
        issue_log.assigned_to = issue.assigned_to
        return issue_log

    def _print_final_msg(self):
        """
        Abstract method for printing the final summary message
        """
        raise NotImplementedError

    def _get_changes(self, issue_id):
        aux = self.store.execute("SELECT id, field, new_value, changed_by, \
        changed_on FROM changes where issue_id=%s" % (issue_id))
        return aux

    def _post_history(self, issue_id):
        """
        Abstract method for inserting extra data using the full issue history
        """
        pass

    def run(self):
        ndone = 0
        issues = self.store.find(DBIssue)
        total = issues.count()
        print("[IssuesLog] Total issues to analyze: " + str(total))
        for i in issues:
            if ndone % 1000 == 0:
                print("[IssuesLog] Analyzed " + str(ndone) + "/" + str(total))
            db_ilog = self._get_dbissues_object(i.issue, i.tracker_id)
            db_ilog = self._copy_standard_values(i, db_ilog)
            final_status = db_ilog.status

            db_ilog = self._build_initial_state(db_ilog)

            self.store.add(db_ilog)
            self.store.flush()

            # the code below gets all the changes and insert a row per change
            changes = self._get_changes(db_ilog.issue_id)

            for ch in changes:
                change_id = ch[0]
                field = ch[1]
                new_value = ch[2]
                changed_by = ch[3]
                date = ch[4]
                # we need a new object to be inserted in the database
                db_ilog = self._copy_issue(db_ilog)
                db_ilog.date = date
                db_ilog.change_id = change_id
                db_ilog.submitted_by = changed_by
                db_ilog = self._assign_values(db_ilog, field, new_value)

                try:
                    self.store.add(db_ilog)
                    self.store.flush()
                except Exception:
                    # self.store.rollback() # is this useful in this context?
                    traceback.print_exc()
            ##self._post_history(db_ilog, final_status)
            self.store.commit()
            ndone += 1
        self._print_final_msg()
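
Example #40 hard-codes the Bugzilla field mapping inline; in this refactored base class the same logic lives behind the abstract hooks. A sketch of a concrete subclass, assuming the bg_issues_links mapping from example #40 and a hypothetical DBBugzillaIssuesLog model:

class BugzillaIssuesLog(IssuesLog):
    def _get_dbissues_object(self, issue_name, tracker_id):
        return DBBugzillaIssuesLog(issue_name, tracker_id)

    def _assign_values(self, db_ilog, field, value):
        # translate a Bugzilla change record into the log columns
        if field in bg_issues_links:
            setattr(db_ilog, bg_issues_links[field], value)
        return db_ilog

    def _copy_issue_ext(self, aux, db_ilog):
        # no Bugzilla-specific columns to copy in this sketch
        return aux

    def _print_final_msg(self):
        print("[IssuesLog] Bugzilla issues log completed")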
コード例 #47
0
class ZStormResourceManagerTest(TestHelper):
    def is_supported(self):
        return has_transaction and has_zope_component and has_testresources

    def setUp(self):
        super(ZStormResourceManagerTest, self).setUp()
        package_dir = self.makeDir()
        sys.path.append(package_dir)
        self.patch_dir = os.path.join(package_dir, "patch_package")
        os.mkdir(self.patch_dir)
        self.makeFile(path=os.path.join(self.patch_dir, "__init__.py"),
                      content="")
        self.makeFile(path=os.path.join(self.patch_dir, "patch_1.py"),
                      content=PATCH)
        import patch_package
        create = ["CREATE TABLE test (foo TEXT UNIQUE, bar INT)"]
        drop = ["DROP TABLE test"]
        delete = ["DELETE FROM test"]
        uri = "sqlite:///%s" % self.makeFile()
        schema = ZSchema(create, drop, delete, PatchSet(patch_package))
        self.databases = [{"name": "test", "uri": uri, "schema": schema}]
        self.resource = ZStormResourceManager(self.databases)
        self.resource.vertical_patching = False
        self.store = Store(create_database(uri))

    def tearDown(self):
        global_zstorm._reset()
        del sys.modules["patch_package"]
        sys.modules.pop("patch_package.patch_1", None)
        super(ZStormResourceManagerTest, self).tearDown()

    def test_make(self):
        """
        L{ZStormResourceManager.make} returns a L{ZStorm} resource that can be
        used to get the registered L{Store}s.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo, bar FROM test")))

    def test_make_lazy(self):
        """
        L{ZStormResourceManager.make} does not create all stores upfront, but
        only when they're actually used, likewise L{ZStorm.get}.
        """
        zstorm = self.resource.make([])
        self.assertEqual([], list(zstorm.iterstores()))
        store = zstorm.get("test")
        self.assertEqual([("test", store)], list(zstorm.iterstores()))

    def test_make_upgrade(self):
        """
        L{ZStormResourceManager.make} upgrades the schema if needed.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT bar FROM test")))

    def test_make_upgrade_unknown_patch(self):
        """
        L{ZStormResourceManager.make} resets the schema if an unknown patch
        is found
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("INSERT INTO patch VALUES (2)")
        self.store.execute("CREATE TABLE test (foo TEXT, egg BOOL)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo, bar FROM test")))
        self.assertEqual([(1, )],
                         list(store.execute("SELECT version FROM patch")))

    def test_make_delete(self):
        """
        L{ZStormResourceManager.make} deletes the data from all tables to make
        sure that tests run against a clean database.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.execute("INSERT INTO test (foo) VALUES ('data')")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo FROM test")))

    def test_make_commits_transaction_once(self):
        """
        L{ZStormResourceManager.make} commits schema changes only once
        across all stores, after all patch and delete statements have
        been executed.
        """
        database2 = {
            "name": "test2",
            "uri": "sqlite:///%s" % self.makeFile(),
            "schema": self.databases[0]["schema"]
        }
        self.databases.append(database2)
        other_store = Store(create_database(database2["uri"]))
        for store in [self.store, other_store]:
            store.execute("CREATE TABLE patch "
                          "(version INTEGER NOT NULL PRIMARY KEY)")
            store.execute("CREATE TABLE test (foo TEXT)")
            store.execute("INSERT INTO test (foo) VALUES ('data')")
            store.commit()

        with CaptureTracer() as tracer:
            zstorm = self.resource.make([])

        self.assertEqual(["COMMIT", "COMMIT"], tracer.queries[-2:])
        store1 = zstorm.get("test")
        store2 = zstorm.get("test2")
        self.assertEqual([], list(store1.execute("SELECT foo FROM test")))
        self.assertEqual([], list(store2.execute("SELECT foo FROM test")))

    def test_make_zstorm_overwritten(self):
        """
        L{ZStormResourceManager.make} registers its own ZStorm again if a test
        has registered a new ZStorm utility overwriting the resource one.
        """
        zstorm = self.resource.make([])
        provideUtility(ZStorm())
        self.resource.make([])
        self.assertIs(zstorm, getUtility(IZStorm))

    def test_clean_flush(self):
        """
        L{ZStormResourceManager.clean} tries to flush the stores to make sure
        that they are all in a consistent state.
        """
        class Test(object):
            __storm_table__ = "test"
            foo = Unicode()
            bar = Int(primary=True)

            def __init__(self, foo, bar):
                self.foo = foo
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(u"data", 1))
        store.add(Test(u"data", 2))
        self.assertRaises(IntegrityError, self.resource.clean, zstorm)

    def test_clean_delete(self):
        """
        L{ZStormResourceManager.clean} cleans the database tables from the data
        created by the tests.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))

    def test_clean_with_force_delete(self):
        """
        If L{ZStormResourceManager.force_delete} is C{True}, L{Schema.delete}
        is always invoked upon test cleanup.
        """
        zstorm = self.resource.make([])
        zstorm.get("test")  # Force the creation of the store
        self.store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        self.store.commit()
        self.resource.force_delete = True
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))

    def test_wb_clean_clears_alive_cache_before_abort(self):
        """
        L{ZStormResourceManager.clean} clears the alive cache before
        aborting the transaction.
        """
        class Test(object):
            __storm_table__ = "test"
            bar = Int(primary=True)

            def __init__(self, bar):
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(1))
        store.add(Test(2))
        real_invalidate = store.invalidate

        def invalidate_proxy():
            self.assertEqual(0, len(list(store._alive.values())))
            real_invalidate()

        store.invalidate = invalidate_proxy

        self.resource.clean(zstorm)

    def test_schema_uri(self):
        """
        It's possible to specify an alternate URI for applying the schema
        and cleaning up tables after a test.
        """
        schema_uri = "sqlite:///%s" % self.makeFile()
        self.databases[0]["schema-uri"] = schema_uri
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        schema_store = Store(create_database(schema_uri))

        # The schema was applied using the alternate schema URI
        statement = "SELECT name FROM sqlite_master WHERE name='patch'"
        self.assertEqual([], list(store.execute(statement)))
        self.assertEqual([("patch", )], list(schema_store.execute(statement)))

        # The cleanup is performed with the alternate schema URI
        store.commit()
        schema_store.execute("INSERT INTO test (foo) VALUES ('data')")
        schema_store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(schema_store.execute("SELECT * FROM test")))

    def test_schema_uri_with_schema_stamp_dir(self):
        """
        If a schema stamp directory is set, and the stamp indicates there's no
        need to update the schema, the resource clean up code will still
        connect as schema user if it needs to run the schema delete statements
        because of a commit.
        """
        self.resource.schema_stamp_dir = self.makeFile()
        self.databases[0]["schema-uri"] = self.databases[0]["uri"]
        self.resource.make([])

        # Simulate a second test run that initializes the zstorm resource
        # from scratch, using the same schema stamp directory
        resource2 = ZStormResourceManager(self.databases)
        resource2.schema_stamp_dir = self.resource.schema_stamp_dir
        zstorm = resource2.make([])
        store = zstorm.get("test")
        store.execute("INSERT INTO test (foo) VALUES ('data')")
        store.commit()  # Committing will force a schema.delete() run
        resource2.clean(zstorm)
        self.assertEqual([], list(store.execute("SELECT * FROM test")))

    def test_no_schema(self):
        """
        A particular database may have no schema associated.
        """
        self.databases[0]["schema"] = None
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([],
                         list(store.execute("SELECT * FROM sqlite_master")))

    def test_no_schema_clean(self):
        """
        A particular database may have no schema associated. If it's committed
        during tests, it will just be skipped when cleaning up tables.
        """
        self.databases[0]["schema"] = None
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.commit()

        with CaptureTracer() as tracer:
            self.resource.clean(zstorm)

        self.assertEqual([], tracer.queries)

    def test_deprecated_database_format(self):
        """
        The old deprecated format of the 'database' constructor parameter is
        still supported.
        """
        import patch_package
        uri = "sqlite:///%s" % self.makeFile()
        schema = ZSchema([], [], [], patch_package)
        resource = ZStormResourceManager({"test": (uri, schema)})
        zstorm = resource.make([])
        store = zstorm.get("test")
        self.assertIsNot(None, store)

    def test_use_global_zstorm(self):
        """
        If the C{use_global_zstorm} attribute is C{True} then the global
        L{ZStorm} will be used.
        """
        self.resource.use_global_zstorm = True
        zstorm = self.resource.make([])
        self.assertIs(global_zstorm, zstorm)

    def test_provide_utility_before_patches(self):
        """
        The L{IZStorm} utility is provided before patches are applied, so
        that patches can look it up if they need to.
        """
        content = ("from zope.component import getUtility\n"
                   "from storm.zope.interfaces import IZStorm\n"
                   "def apply(store):\n"
                   "    getUtility(IZStorm)\n")
        self.makeFile(path=os.path.join(self.patch_dir, "patch_2.py"),
                      content=content)
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([(1, ), (2, )],
                         sorted(store.execute("SELECT version FROM patch")))

    def test_create_schema_stamp_dir(self):
        """
        If a schema stamp directory is set, it's created automatically if it
        doesn't exist yet.
        """
        self.resource.schema_stamp_dir = self.makeFile()
        self.resource.make([])
        self.assertTrue(os.path.exists(self.resource.schema_stamp_dir))

    def test_use_schema_stamp(self):
        """
        If a schema stamp directory is set, then it's used to decide whether
        to upgrade the schema or not. In case the patch directory hasn't been
        changed since the last known upgrade, no schema upgrade is run.
        """
        self.resource.schema_stamp_dir = self.makeFile()

        self.resource.make([])

        # Simulate a second test run that initializes the zstorm resource
        # from scratch, using the same schema stamp directory
        resource2 = ZStormResourceManager(self.databases)
        resource2.schema_stamp_dir = self.resource.schema_stamp_dir

        with CaptureTracer() as tracer:
            resource2.make([])

        self.assertEqual([], tracer.queries)

    def test_use_schema_stamp_out_of_date(self):
        """
        If a schema stamp directory is set, then it's used to decide whether
        to upgrade the schema or not. In case the patch directory has changed
        a schema upgrade is run.
        """
        self.resource.schema_stamp_dir = self.makeFile()
        self.resource.make([])

        # Simulate a second test run that initializes the zstorm resource
        # from scratch, using the same schema stamp directory
        resource2 = ZStormResourceManager(self.databases)
        resource2.schema_stamp_dir = self.resource.schema_stamp_dir

        self.makeFile(path=os.path.join(self.patch_dir, "patch_2.py"),
                      content="def apply(store): pass")

        class FakeStat(object):
            st_mtime = os.stat(self.patch_dir).st_mtime + 1

        stat_mock = self.mocker.replace(os.stat)
        stat_mock(self.patch_dir)
        self.mocker.result(FakeStat())
        self.mocker.replay()

        resource2.make([])
        result = self.store.execute("SELECT version FROM patch")
        self.assertEqual([(1, ), (2, )], sorted(result.get_all()))
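
    # A hedged sketch of the PATCH constant's likely content (an assumption
    # inferred from test_make_upgrade above, which expects the schema upgrade
    # to add a `bar` column to the `test` table): patch modules are plain
    # Python files named patch_<version>.py exposing an apply(store) function.
    #
    #     # patch_package/patch_1.py
    #     def apply(store):
    #         store.execute("ALTER TABLE test ADD COLUMN bar INT")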
コード例 #48
0
ファイル: patch.py プロジェクト: welitonfreitas/storm-py3
class PatchApplierTest(MockerTestCase):
    def setUp(self):
        super(PatchApplierTest, self).setUp()

        self.patchdir = self.makeDir()
        self.pkgdir = os.path.join(self.patchdir, "mypackage")
        os.makedirs(self.pkgdir)

        f = open(os.path.join(self.pkgdir, "__init__.py"), "w")
        f.write("shared_data = []")
        f.close()

        # The order of creation here is deliberately not the application
        # order: os.listdir returns entries in an arbitrary,
        # filesystem-dependent order, so the applier must sort by version.
        for pname, data in [("patch_380.py", patch_test_1),
                            ("patch_42.py", patch_test_0)]:
            self.add_module(pname, data)

        sys.path.append(self.patchdir)

        self.filename = self.makeFile()
        self.uri = "sqlite:///%s" % self.filename
        self.store = Store(create_database(self.uri))

        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")

        self.assertFalse(self.store.get(Patch, (42)))
        self.assertFalse(self.store.get(Patch, (380)))

        import mypackage
        self.mypackage = mypackage
        self.patch_set = PatchSet(mypackage)

        # Create another connection just to keep track of the state of the
        # whole transaction manager.  See the assertion functions below.
        self.another_store = Store(create_database("sqlite:"))
        self.another_store.execute("CREATE TABLE test (id INT)")
        self.another_store.commit()
        self.prepare_for_transaction_check()

        class Committer(object):
            def commit(committer):
                self.store.commit()
                self.another_store.commit()

            def rollback(committer):
                self.store.rollback()
                self.another_store.rollback()

        self.committer = Committer()
        self.patch_applier = PatchApplier(self.store, self.patch_set,
                                          self.committer)

    def tearDown(self):
        super(PatchApplierTest, self).tearDown()
        self.committer.rollback()
        sys.path.remove(self.patchdir)
        for name in list(sys.modules):
            if name == "mypackage" or name.startswith("mypackage."):
                del sys.modules[name]

    def add_module(self, module_filename, contents):
        filename = os.path.join(self.pkgdir, module_filename)
        file = open(filename, "w")
        file.write(contents)
        file.close()

    def remove_all_modules(self):
        for filename in os.listdir(self.pkgdir):
            os.unlink(os.path.join(self.pkgdir, filename))

    def prepare_for_transaction_check(self):
        self.another_store.execute("DELETE FROM test")
        self.another_store.execute("INSERT INTO test VALUES (1)")

    def assert_transaction_committed(self):
        self.another_store.rollback()
        result = self.another_store.execute("SELECT * FROM test").get_one()
        self.assertEquals(result, (1, ),
                          "Transaction manager wasn't committed.")

    def assert_transaction_aborted(self):
        self.another_store.commit()
        result = self.another_store.execute("SELECT * FROM test").get_one()
        self.assertEquals(result, None, "Transaction manager wasn't aborted.")

    def test_apply(self):
        """
        L{PatchApplier.apply} executes the patch with the given version.
        """
        self.patch_applier.apply(42)

        x = getattr(self.mypackage, "patch_42").x
        self.assertEquals(x, 42)
        self.assertTrue(self.store.get(Patch, (42)))
        self.assertTrue("mypackage.patch_42" in sys.modules)

        self.assert_transaction_committed()

    def test_apply_with_patch_directory(self):
        """
        If the given L{PatchSet} uses sub-level patches, then the
        L{PatchApplier.apply} method will look at the per-patch directory and
        apply the relevant sub-level patch.
        """
        path = os.path.join(self.pkgdir, "patch_99")
        self.makeDir(path=path)
        self.makeFile(content="", path=os.path.join(path, "__init__.py"))
        self.makeFile(content=patch_test_0, path=os.path.join(path, "foo.py"))
        self.patch_set._sub_level = "foo"
        self.add_module("patch_99/foo.py", patch_test_0)
        self.patch_applier.apply(99)
        self.assertTrue(self.store.get(Patch, (99)))

    def test_apply_all(self):
        """
        L{PatchApplier.apply_all} executes all unapplied patches.
        """
        self.patch_applier.apply_all()

        self.assertTrue("mypackage.patch_42" in sys.modules)
        self.assertTrue("mypackage.patch_380" in sys.modules)

        x = getattr(self.mypackage, "patch_42").x
        y = getattr(self.mypackage, "patch_380").y

        self.assertEquals(x, 42)
        self.assertEquals(y, 380)

        self.assert_transaction_committed()

    def test_apply_exploding_patch(self):
        """
        L{PatchApplier.apply} aborts the transaction if the patch fails.
        """
        self.remove_all_modules()
        self.add_module("patch_666.py", patch_explosion)
        self.assertRaises(StormError, self.patch_applier.apply, 666)

        self.assert_transaction_aborted()

    def test_wb_apply_all_exploding_patch(self):
        """
        When a patch explodes the store is rolled back to make sure
        that any changes the patch made to the database are removed.
        Any other patches that have been applied successfully before
        it should not be rolled back.  Any patches pending after the
        exploding patch should remain unapplied.
        """
        self.add_module("patch_666.py", patch_explosion)
        self.add_module("patch_667.py", patch_after_explosion)
        self.assertEquals(list(self.patch_applier.get_unapplied_versions()),
                          [42, 380, 666, 667])
        self.assertRaises(StormError, self.patch_applier.apply_all)
        self.assertEquals(list(self.patch_applier.get_unapplied_versions()),
                          [666, 667])

    def test_mark_applied(self):
        """
        L{PatchApplier.mark_applied} marks a patch as applied by inserting a
        new row in the patch table.
        """
        self.patch_applier.mark_applied(42)

        self.assertFalse("mypackage.patch_42" in sys.modules)
        self.assertFalse("mypackage.patch_380" in sys.modules)

        self.assertTrue(self.store.get(Patch, 42))
        self.assertFalse(self.store.get(Patch, 380))

        self.assert_transaction_committed()

    def test_mark_applied_all(self):
        """
        L{PatchApplier.mark_applied_all} marks all pending patches as applied.
        """
        self.patch_applier.mark_applied_all()

        self.assertFalse("mypackage.patch_42" in sys.modules)
        self.assertFalse("mypackage.patch_380" in sys.modules)

        self.assertTrue(self.store.get(Patch, 42))
        self.assertTrue(self.store.get(Patch, 380))

        self.assert_transaction_committed()

    def test_application_order(self):
        """
        L{PatchApplier.apply_all} applies the patches in increasing version
        order.
        """
        self.patch_applier.apply_all()
        self.assertEquals(self.mypackage.shared_data, [42, 380])

    def test_has_pending_patches(self):
        """
        L{PatchApplier.has_pending_patches} returns C{True} if there are
        patches to be applied, C{False} otherwise.
        """
        self.assertTrue(self.patch_applier.has_pending_patches())
        self.patch_applier.apply_all()
        self.assertFalse(self.patch_applier.has_pending_patches())

    def test_abort_if_unknown_patches(self):
        """
        L{PatchApplier.apply_all} raises an error if the patch table
        contains patches without a matching file in the patch module.
        """
        self.patch_applier.mark_applied(381)
        self.assertRaises(UnknownPatchError, self.patch_applier.apply_all)

    def test_get_unknown_patch_versions(self):
        """
        L{PatchApplier.get_unknown_patch_versions} returns the versions of all
        patches recorded in the database that have no matching patch file.
        """
        patches = [Patch(42), Patch(380), Patch(381)]
        my_store = MockPatchStore("database", patches=patches)
        patch_applier = PatchApplier(my_store, self.mypackage)
        self.assertEqual(set([381]),
                         patch_applier.get_unknown_patch_versions())

    def test_no_unknown_patch_versions(self):
        """
        L{PatchApplier.get_unknown_patch_versions} returns an empty set if
        every patch recorded in the database has a matching patch file.
        """
        patches = [Patch(42), Patch(380)]
        my_store = MockPatchStore("database", patches=patches)
        patch_applier = PatchApplier(my_store, self.mypackage)
        self.assertEqual(set(), patch_applier.get_unknown_patch_versions())

    def test_patch_with_incorrect_apply(self):
        """
        L{PatchApplier.apply_all} raises an error as soon as one of the patches
        to be applied fails.
        """
        self.add_module("patch_999.py", patch_no_args_apply)
        try:
            self.patch_applier.apply_all()
        except BadPatchError as e:
            self.assertTrue("mypackage/patch_999.py" in str(e))
            self.assertTrue("takes no arguments" in str(e))
            self.assertTrue("TypeError" in str(e))
        else:
            self.fail("BadPatchError not raised")

    def test_patch_with_missing_apply(self):
        """
        L{PatchApplier.apply_all} raises an error if one of the patches to
        be applied has no 'apply' function defined.
        """
        self.add_module("patch_999.py", patch_missing_apply)
        try:
            self.patch_applier.apply_all()
        except BadPatchError as e:
            self.assertTrue("mypackage/patch_999.py" in str(e))
            self.assertTrue("no attribute" in str(e))
            self.assertTrue("AttributeError" in str(e))
        else:
            self.fail("BadPatchError not raised")

    def test_patch_with_syntax_error(self):
        """
        L{PatchApplier.apply_all} raises an error if one of the patches to
        be applied contains a syntax error.
        """
        self.add_module("patch_999.py", "that's not python")
        try:
            self.patch_applier.apply_all()
        except BadPatchError as e:
            self.assertTrue(" 999 " in str(e))
            self.assertTrue("SyntaxError" in str(e))
        else:
            self.fail("BadPatchError not raised")

    def test_patch_error_includes_traceback(self):
        """
        The exception raised by L{PatchApplier.apply_all} when a patch fails
        includes the relevant traceback from the patch.
        """
        self.add_module("patch_999.py", patch_name_error)
        try:
            self.patch_applier.apply_all()
        except BadPatchError as e:
            self.assertTrue("mypackage/patch_999.py" in str(e))
            self.assertTrue("NameError" in str(e))
            self.assertTrue("blah" in str(e))
            formatted = traceback.format_exc()
            self.assertTrue("# Comment" in formatted)
        else:
            self.fail("BadPatchError not raised")
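
# Hedged sketch of the patch_test_* fixture constants referenced above but
# not included in this excerpt; the bodies are inferred from the assertions
# (patch_42 sets x = 42, patch_380 sets y = 380, each records its version in
# mypackage.shared_data, and patch_explosion raises a StormError):
patch_test_0 = """
x = 42
def apply(store):
    from mypackage import shared_data
    shared_data.append(42)
"""

patch_explosion = """
from storm.exceptions import StormError
def apply(store):
    raise StormError('Explosion!')
"""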
コード例 #49
0
ファイル: base_updater.py プロジェクト: RuanAragao/GlobaLeaks
class TableReplacer(object):
    """
    This is the base class used by every Updater
    """

    def __init__(self, old_db_file, new_db_file, start_ver):
        from globaleaks.db.update_8_9 import Context_v_8, Receiver_v_8, Notification_v_8
        from globaleaks.db.update_9_10 import Node_v_9, Receiver_v_9, User_v_9
        from globaleaks.db.update_10_11 import InternalTip_v_10, InternalFile_v_10
        from globaleaks.db.update_11_12 import Node_v_11, Context_v_11
        from globaleaks.db.update_12_13 import Node_v_12, Context_v_12
        from globaleaks.db.update_13_14 import Node_v_13, Context_v_13
        from globaleaks.db.update_14_15 import Node_v_14, User_v_14, Context_v_14, Receiver_v_14, \
            InternalTip_v_14, Notification_v_14, Stats_v_14, Comment_v_14
        from globaleaks.db.update_15_16 import Receiver_v_15, Notification_v_15
        from globaleaks.db.update_16_17 import Node_v_16, Receiver_v_16, Notification_v_16, Stats_v_16
        from globaleaks.db.update_17_18 import Node_v_17
        from globaleaks.db.update_18_19 import Node_v_18
        from globaleaks.db.update_19_20 import Node_v_19, Notification_v_19, Comment_v_19, Message_v_19, \
            InternalTip_v_19, ReceiverTip_v_19, InternalFile_v_19, ReceiverFile_v_19, Receiver_v_19, \
            Context_v_19

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        self.table_history = {
            'Node': [Node_v_9, None, Node_v_11, None, Node_v_12, Node_v_13, Node_v_14, Node_v_16, None, Node_v_17,
                     Node_v_18, Node_v_19, models.Node],
            'User': [User_v_9, None, User_v_14, None, None, None, None, models.User, None, None, None, None, None],
            'Context': [Context_v_8, Context_v_11, None, None, Context_v_12, Context_v_13, Context_v_14, Context_v_19,
                        None, None, None, None, models.Context],
            'Receiver': [Receiver_v_8, Receiver_v_9, Receiver_v_14, None, None, None, None, Receiver_v_15,
                         Receiver_v_16, Receiver_v_19, None, None, models.Receiver],
            'ReceiverFile': [ReceiverFile_v_19, None, None, None, None, None, None, None, None, None, None, None,
                             models.ReceiverFile],
            'Notification': [Notification_v_8, Notification_v_14, None, None, None, None, None, Notification_v_15,
                             Notification_v_16, Notification_v_19, None, None, models.Notification],
            'Comment': [Comment_v_14, None, None, None, None, None, None, Comment_v_19, None, None, None, None,
                        models.Comment],
            'InternalTip': [InternalTip_v_10, None, None, InternalTip_v_14, None, None, None, InternalTip_v_19, None,
                            None, None, None, models.InternalTip],
            'InternalFile': [InternalFile_v_10, None, None, InternalFile_v_19, None, None, None, None, None, None, None,
                             None, models.InternalFile],
            'WhistleblowerTip': [models.WhistleblowerTip, None, None, None, None, None, None, None, None, None, None,
                                 None, None],
            'ReceiverTip': [ReceiverTip_v_19, None, None, None, None, None, None, None, None, None, None, None,
                            models.ReceiverTip],
            'ReceiverInternalTip': [models.ReceiverInternalTip, None, None, None, None, None, None, None, None, None,
                                    None, None, None],
            'ReceiverContext': [models.ReceiverContext, None, None, None, None, None, None, None, None, None, None,
                                None, None],
            'Message': [Message_v_19, None, None, None, None, None, None, None, None, None, None, None, models.Message],
            'Stats': [Stats_v_14, None, None, None, None, None, None, Stats_v_16, None, models.Stats, None, None, None],
            'ApplicationData': [models.ApplicationData, None, None, None, None, None, None, None, None, None, None,
                                None, None],
            'Field': [models.Field, None, None, None, None, None, None, None, None, None, None, None, None],
            'FieldOption': [models.FieldOption, None, None, None, None, None, None, None, None, None, None, None, None],
            'FieldField': [models.FieldField, None, None, None, None, None, None, None, None, None, None, None, None],
            'Step': [models.Step, None, None, None, None, None, None, None, None, None, None, None, None],
            'StepField': [models.StepField, None, None, None, None, None, None, None, None, None, None, None, None],
            'Anomalies': [models.Anomalies, None, None, None, None, None, None, None, None, None, None, None, None],
            'EventLogs': [models.EventLogs, None, None, None, None, None, None, None, None, None, None, None, None],
        }

        for k, v in self.table_history.iteritems():
            # +1 because the count starts from 0;
            # -8 because releases before the 8th are no longer supported
            length = DATABASE_VERSION + 1 - 8
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            log.msg('{} Acquire SQL schema {}'.format(self.debug_info, GLSetting.db_schema_file))

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSetting.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        log.msg('OperationalError in "{}"'.format(create_query))
            self.store_new.commit()
            # the final migration step creates the new DB from the full
            # schema file, so no per-table create queries are needed
            return

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(self.debug_info, create_query))
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        pass

    def epilogue(self):
        pass

    def get_right_model(self, table_name, version):

        table_index = (version - 8)

        if table_name not in self.table_history:
            msg = 'Not implemented usage of get_right_model {} ({} {})'.format(
                __file__, table_name, self.start_ver)
            raise NotImplementedError(msg)

        if version > DATABASE_VERSION:
            raise ValueError('Version supplied must be less or equal to {}'.format(
                DATABASE_VERSION))

        if self.table_history[table_name][table_index]:
            return self.table_history[table_name][table_index]

        # else it's None, and we have to walk back to the previous valid version
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # This should never happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name: the name of the table to create
        @param version: the target database version
        @return:
            The CREATE query for the model at the given version, or None
            if the table is not present in that version.
        """

        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):
        models_count = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)
        ).count()
        log.msg('{} default {} migration assistant: #{}'.format(
            self.debug_info, table_name, models_count))

        old_objects = self.store_old.find(self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # Storm internals simply reversed
            for _, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        log.msg('{} default {} migration assistant'.format(self.debug_info, table_name))

        old_obj = self.store_old.find(self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # Storm internals simply reversed
        for _, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        has been created between 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        has been created between 14 and 15
        and is not migrated since 17
        """
        if self.start_ver < 17:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        There is no need to migrate the application data.
        Default application data is loaded by the application
        and stored onto the db at each new start.
        """
        return

    def migrate_Field(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Field")

    def migrate_FieldOption(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldOption")

    def migrate_FieldField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldField")

    def migrate_Step(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Step")

    def migrate_StepField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("StepField")

    def migrate_Anomalies(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Anomalies")

    def migrate_EventLogs(self):
        """
        EventLogs has been created between 15 and 16!
        should be dropped before 20
        """
        if self.start_ver < 20:
            return

        self._perform_copy_list("EventLogs")
コード例 #50
0
ファイル: testing.py プロジェクト: quodt/storm
class ZStormResourceManagerTest(TestHelper):

    def is_supported(self):
        return has_zope and has_testresources

    def setUp(self):
        super(ZStormResourceManagerTest, self).setUp()
        self._package_dir = self.makeDir()
        sys.path.append(self._package_dir)
        patch_dir = os.path.join(self._package_dir, "patch_package")
        os.mkdir(patch_dir)
        self.makeFile(path=os.path.join(patch_dir, "__init__.py"), content="")
        self.makeFile(path=os.path.join(patch_dir, "patch_1.py"),
                      content=PATCH)
        import patch_package
        create = ["CREATE TABLE test (foo TEXT UNIQUE, bar INT)"]
        drop = ["DROP TABLE test"]
        delete = ["DELETE FROM test"]
        schema = ZSchema(create, drop, delete, patch_package)
        uri = "sqlite:///%s" % self.makeFile()
        self.resource = ZStormResourceManager({"test": (uri, schema)})
        self.store = Store(create_database(uri))

    def tearDown(self):
        del sys.modules["patch_package"]
        sys.path.remove(self._package_dir)
        if "patch_1" in sys.modules:
            del sys.modules["patch_1"]
        super(ZStormResourceManagerTest, self).tearDown()

    def test_make(self):
        """
        L{ZStormResourceManager.make} returns a L{ZStorm} resource that can be
        used to get the registered L{Store}s.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo, bar FROM test")))

    def test_make_upgrade(self):
        """
        L{ZStormResourceManager.make} upgrades the schema if needed.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT bar FROM test")))

    def test_make_delete(self):
        """
        L{ZStormResourceManager.make} deletes the data from all tables to make
        sure that tests run against a clean database.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.execute("INSERT INTO test (foo) VALUES ('data')")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo FROM test")))

    def test_clean_flush(self):
        """
        L{ZStormResourceManager.clean} tries to flush the stores to make sure
        that they are all in a consistent state.
        """

        class Test(object):
            __storm_table__ = "test"
            foo = Unicode()
            bar = Int(primary=True)

            def __init__(self, foo, bar):
                self.foo = foo
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(u"data", 1))
        store.add(Test(u"data", 2))
        self.assertRaises(IntegrityError, self.resource.clean, zstorm)

    def test_clean_delete(self):
        """
        L{ZStormResourceManager.clean} cleans the database tables from the data
        created by the tests.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))
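
# Hedged sketch of plugging the resource manager into testresources:
# ResourcedTestCase binds each named resource as an attribute on the test
# case. `uri` and `schema` stand for the values built in setUp above.
from testresources import ResourcedTestCase

class StoreResourceTest(ResourcedTestCase):
    resources = [("zstorm", ZStormResourceManager({"test": (uri, schema)}))]

    def test_insert(self):
        store = self.zstorm.get("test")
        store.execute("INSERT INTO test (foo, bar) VALUES ('x', 1)")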
コード例 #51
0
class NCBITaxnomyInserter(object):

    def __init__(self, divisions_file_path, taxonomy_divisions_file_path):
        self.included_divisions = {0: "Bacteria", 3: "Phages", 9: "Viruses",
                                   11: "Environmental samples",
                                   1: "Invertebrates", 4: "Plants and Fungi"}

        self.divisions_file_path = divisions_file_path
        self.taxonomy_divisions_file_path = taxonomy_divisions_file_path
        self.__init_database()

        if not self.init_tables():
            self.create_tables()

    def __init_database(self):
        """
        Creates the Storm database instance backed by the biodb SQLite
        file and opens a store on it.
        """
        database = create_database("sqlite:%s" % biodb_sql_db_path)
        print "Created storm database from %s." % biodb_sql_db_path
        self.store = Store(database)
        

    def init_tables(self):
        self.biodb_table = "biodb_ncbi"
        self.taxonomy_division_table = "biodb_ncbi_taxonomy_division"
        self.division_table = "biodb_ncbi_division"

        # check whether the taxonomy division table already exists
        table_list = [table[0] for table in
                      self.store.execute('select tbl_name from SQLITE_MASTER')]

        return 0 if self.taxonomy_division_table not in table_list else 1


    def create_tables(self):
        self.create_taxonomy_division_string = (
            'CREATE TABLE ' + self.taxonomy_division_table +
            ' (taxonID INTEGER PRIMARY KEY, divisionID INTEGER,'
            ' FOREIGN KEY (taxonID) REFERENCES ' + self.biodb_table + '(id),'
            ' FOREIGN KEY (divisionID) REFERENCES ' + self.division_table + '(id))')

        self.create_division_string = (
            'CREATE TABLE ' + self.division_table +
            ' (id INTEGER PRIMARY KEY, name VARCHAR)')

        self.store.execute(self.create_taxonomy_division_string)
        self.store.execute(self.create_division_string)

    def insert_division(self, div_id, name):
        div = NCBIDivision()
        div.id = int(div_id)
        div.name = unicode(name)

        self.store.add(div)

    def insert_taxonomy_division(self, taxon_id, div_id):
        n_tax_div = NCBITaxonomyDivision()
        n_tax_div.taxonID = int(taxon_id)
        n_tax_div.divisionID = int(div_id)

        self.store.add(n_tax_div)


    def insert_divisions_from_file(self):
        with open(self.divisions_file_path) as div_file:
            for line in div_file:
                cols = line.rstrip('\n').split(sep)
                div_id = cols[0]
                name = cols[2]
                self.insert_division(div_id, name)

        self.store.commit()

    def insert_taxonomy_divisions_from_file(self):
        i = 0
        with open(self.taxonomy_divisions_file_path) as tax_div_file:
            for line in tax_div_file:
                cols = line.rstrip('\n').split(sep)

                division_id = int(cols[4].strip())

                if division_id in self.included_divisions:
                    tax_id = cols[0].strip()
                    self.insert_taxonomy_division(tax_id, division_id)

                    self.store.commit()

                    i += 1
                    if i % 10000 == 0:
                        print "%d taxa inserted!" % i
コード例 #52
0
ファイル: patch.py プロジェクト: DamnWidget/mamba-storm
class PatchTest(MockerTestCase):

    def setUp(self):
        super(PatchTest, self).setUp()

        self.patchdir = self.makeDir()
        self.pkgdir = os.path.join(self.patchdir, "mypackage")
        os.makedirs(self.pkgdir)

        f = open(os.path.join(self.pkgdir, "__init__.py"), "w")
        f.write("shared_data = []")
        f.close()

        # The order of creation here is deliberately not the application
        # order: os.listdir returns entries in an arbitrary,
        # filesystem-dependent order, so the applier must sort by version.
        for pname, data in [("patch_380.py", patch_test_1),
                            ("patch_42.py", patch_test_0)]:
            self.add_module(pname, data)

        sys.path.append(self.patchdir)

        self.filename = self.makeFile()
        self.uri = "sqlite:///%s" % self.filename
        self.store = Store(create_database(self.uri))

        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")

        self.assertFalse(self.store.get(Patch, (42)))
        self.assertFalse(self.store.get(Patch, (380)))

        import mypackage
        self.mypackage = mypackage

        # Create another connection just to keep track of the state of the
        # whole transaction manager.  See the assertion functions below.
        self.another_store = Store(create_database("sqlite:"))
        self.another_store.execute("CREATE TABLE test (id INT)")
        self.another_store.commit()
        self.prepare_for_transaction_check()

        class Committer(object):

            def commit(committer):
                self.store.commit()
                self.another_store.commit()

            def rollback(committer):
                self.store.rollback()
                self.another_store.rollback()

        self.committer = Committer()
        self.patch_applier = PatchApplier(self.store, self.mypackage,
                                          self.committer)

    def tearDown(self):
        super(PatchTest, self).tearDown()
        self.committer.rollback()
        sys.path.remove(self.patchdir)
        for name in list(sys.modules):
            if name == "mypackage" or name.startswith("mypackage."):
                del sys.modules[name]

    def add_module(self, module_filename, contents):
        filename = os.path.join(self.pkgdir, module_filename)
        file = open(filename, "w")
        file.write(contents)
        file.close()

    def remove_all_modules(self):
        for filename in os.listdir(self.pkgdir):
            os.unlink(os.path.join(self.pkgdir, filename))

    def prepare_for_transaction_check(self):
        self.another_store.execute("DELETE FROM test")
        self.another_store.execute("INSERT INTO test VALUES (1)")

    def assert_transaction_committed(self):
        self.another_store.rollback()
        result = self.another_store.execute("SELECT * FROM test").get_one()
        self.assertEquals(result, (1,),
                          "Transaction manager wasn't committed.")

    def assert_transaction_aborted(self):
        self.another_store.commit()
        result = self.another_store.execute("SELECT * FROM test").get_one()
        self.assertEquals(result, None,
                          "Transaction manager wasn't aborted.")

    def test_apply(self):
        """
        L{PatchApplier.apply} executes the patch with the given version.
        """
        self.patch_applier.apply(42)

        x = getattr(self.mypackage, "patch_42").x
        self.assertEquals(x, 42)
        self.assertTrue(self.store.get(Patch, (42)))
        self.assertTrue("mypackage.patch_42" in sys.modules)

        self.assert_transaction_committed()

    def test_apply_all(self):
        """
        L{PatchApplier.apply_all} executes all unapplied patches.
        """
        self.patch_applier.apply_all()

        self.assertTrue("mypackage.patch_42" in sys.modules)
        self.assertTrue("mypackage.patch_380" in sys.modules)

        x = getattr(self.mypackage, "patch_42").x
        y = getattr(self.mypackage, "patch_380").y

        self.assertEquals(x, 42)
        self.assertEquals(y, 380)

        self.assert_transaction_committed()

    def test_apply_exploding_patch(self):
        """
        L{PatchApplier.apply} aborts the transaction if the patch fails.
        """
        self.remove_all_modules()
        self.add_module("patch_666.py", patch_explosion)
        self.assertRaises(StormError, self.patch_applier.apply, 666)

        self.assert_transaction_aborted()

    def test_wb_apply_all_exploding_patch(self):
        """
        When a patch explodes the store is rolled back to make sure
        that any changes the patch made to the database are removed.
        Any other patches that have been applied successfully before
        it should not be rolled back.  Any patches pending after the
        exploding patch should remain unapplied.
        """
        self.add_module("patch_666.py", patch_explosion)
        self.add_module("patch_667.py", patch_after_explosion)
        self.assertEquals(list(self.patch_applier._get_unapplied_versions()),
                          [42, 380, 666, 667])
        self.assertRaises(StormError, self.patch_applier.apply_all)
        self.assertEquals(list(self.patch_applier._get_unapplied_versions()),
                          [666, 667])

    def test_mark_applied(self):
        """
        L{PatchApplier.mark_applied} marks a patch as applied by inserting a
        new row in the patch table.
        """
        self.patch_applier.mark_applied(42)

        self.assertFalse("mypackage.patch_42" in sys.modules)
        self.assertFalse("mypackage.patch_380" in sys.modules)

        self.assertTrue(self.store.get(Patch, 42))
        self.assertFalse(self.store.get(Patch, 380))

        self.assert_transaction_committed()

    def test_mark_applied_all(self):
        """
        L{PatchApplier.mark_applied_all} marks all pending patches as applied.
        """
        self.patch_applier.mark_applied_all()

        self.assertFalse("mypackage.patch_42" in sys.modules)
        self.assertFalse("mypackage.patch_380" in sys.modules)

        self.assertTrue(self.store.get(Patch, 42))
        self.assertTrue(self.store.get(Patch, 380))

        self.assert_transaction_committed()

    def test_application_order(self):
        """
        L{PatchApplier.apply_all} applies the patches in increasing version
        order.
        """
        self.patch_applier.apply_all()
        self.assertEquals(self.mypackage.shared_data,
                          [42, 380])

    def test_has_pending_patches(self):
        """
        L{PatchApplier.has_pending_patches} returns C{True} if there are
        patches to be applied, C{False} otherwise.
        """
        self.assertTrue(self.patch_applier.has_pending_patches())
        self.patch_applier.apply_all()
        self.assertFalse(self.patch_applier.has_pending_patches())

    def test_abort_if_unknown_patches(self):
        """
        L{PatchApplier.apply_all} raises an error if the patch table
        contains patches without a matching file in the patch module.
        """
        self.patch_applier.mark_applied(381)
        self.assertRaises(UnknownPatchError, self.patch_applier.apply_all)

    def test_get_unknown_patch_versions(self):
        """
        L{PatchApplier.get_unknown_patch_versions} returns the versions of all
        patches recorded in the database that have no matching patch file.
        """
        patches = [Patch(42), Patch(380), Patch(381)]
        my_store = MockPatchStore("database", patches=patches)
        patch_applier = PatchApplier(my_store, self.mypackage)
        self.assertEqual(set([381]),
                         patch_applier.get_unknown_patch_versions())

    def test_no_unknown_patch_versions(self):
        """
        L{PatchApplier.get_unknown_patch_versions} returns an empty set if
        every patch recorded in the database has a matching patch file.
        """
        patches = [Patch(42), Patch(380)]
        my_store = MockPatchStore("database", patches=patches)
        patch_applier = PatchApplier(my_store, self.mypackage)
        self.assertEqual(set(), patch_applier.get_unknown_patch_versions())

    def test_patch_with_incorrect_apply(self):
        """
        L{PatchApplier.apply_all} raises an error as soon as one of the patches
        to be applied fails.
        """
        self.add_module("patch_999.py", patch_no_args_apply)
        try:
            self.patch_applier.apply_all()
        except BadPatchError, e:
            self.assertTrue("mypackage/patch_999.py" in str(e))
            self.assertTrue("takes no arguments" in str(e))
            self.assertTrue("TypeError" in str(e))
        else:
            self.fail("BadPatchError not raised")
コード例 #53
0
ファイル: base_updater.py プロジェクト: nsfw/GlobaLeaks
class TableReplacer(object):
    """
    This is the base class used by every Updater
    """

    def __init__(self, table_history, old_db_file, new_db_file, start_ver):
        self.table_history = table_history
        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        for k, v in table_history.iteritems():
            length = DATABASE_VERSION + 1 - FIRST_DATABASE_VERSION_SUPPORTED
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSettings.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            log.msg('{} Acquire SQL schema {}'.format(self.debug_info, GLSettings.db_schema_file))

            if not os.access(GLSettings.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSettings.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSettings.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        log.msg('OperationalError in "{}"'.format(create_query))
            self.store_new.commit()
            # the final migration step creates the new DB from the full
            # schema file, so no per-table create queries are needed
            return

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(self.debug_info, create_query))
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        pass

    def epilogue(self):
        pass

    def get_right_model(self, table_name, version):
        table_index = (version - FIRST_DATABASE_VERSION_SUPPORTED)

        if table_name not in self.table_history:
            msg = 'Not implemented usage of get_right_model {} ({} {})'.format(
                __file__, table_name, self.start_ver)
            raise NotImplementedError(msg)

        if version > DATABASE_VERSION:
            raise ValueError('Version supplied must be less or equal to {}'.format(
                DATABASE_VERSION))

        if self.table_history[table_name][table_index]:
            return self.table_history[table_name][table_index]

        # else it's None, and we have to walk back to the previous valid version
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # This should never happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name: the name of the table to create
        @param version: the target database version
        @return:
            The CREATE query for the model at the given version, or None
            if the table is not present in that version.
        """

        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):
        objs_count = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)
        ).count()
        log.msg('{} default {} migration assistant: #{}'.format(
            self.debug_info, table_name, objs_count))

        old_objects = self.store_old.find(self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # Storm internals simply reversed
            for _, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        log.msg('{} default {} migration assistant'.format(self.debug_info, table_name))

        old_obj = self.store_old.find(self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # Storm internals simply reversed
        for _, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        Stats has been created between 14 and 15
        and is not migrated since 17
        """
        if self.start_ver < 17:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        There is no need to migrate the application data.
        Default application data is loaded by the application
        and stored onto the db at each new start.
        """
        return

    def migrate_Field(self):
        """
        Field has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Field")

    def migrate_FieldAttr(self):
        """
        FieldAttr has been created between 22 and 23!
        """
        if self.start_ver < 23:
            return

        self._perform_copy_list("FieldAttr")

    def migrate_FieldOption(self):
        """
        FieldOption has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldOption")

    def migrate_FieldField(self):
        """
        FieldField has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldField")

    def migrate_Step(self):
        """
        Step was introduced between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Step")

    def migrate_StepField(self):
        """
        StepField was introduced between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("StepField")

    def migrate_Anomalies(self):
        """
        Anomalies was introduced between versions 14 and 15 and dropped
        before version 22; it is migrated only when starting from version 23 or later.
        """
        if self.start_ver < 23:
            return

        self._perform_copy_list("Anomalies")

    def migrate_EventLogs(self):
        """
        EventLogs was introduced between versions 15 and 16 and dropped
        before version 20; it is migrated only when starting from version 20 or later.
        """
        if self.start_ver < 20:
            return

        self._perform_copy_list("EventLogs")
Code Example #54
File: issues_log.py  Project: brainwane/Bicho
class IssuesLog():

    def __init__(self):
        self._connect()
        # it is not incremental so we first drop the table
        self._drop_db()
        self._create_db()

    def _connect(self):
        opts = Config()

        self.database = create_database('mysql://' + opts.db_user_out + ':'
                                        + opts.db_password_out + '@'
                                        + opts.db_hostname_out + ':'
                                        + opts.db_port_out + '/'
                                        + opts.db_database_out)
        self.store = Store(self.database)

    def _create_db(self):
        self.store.execute(self._get_sql_create())

    def _drop_db(self):
        self.store.execute(self._get_sql_drop())

    def _get_people_id(self, email):
        """
        Gets the id of an user
        """
        try:
            p = self.store.find(DBPeople, DBPeople.email == email).one()
            return p.id
        except (AttributeError, NotOneError):
            p = self.store.find(DBPeople, DBPeople.user_id == email).one()
            try:
                return p.id
            except AttributeError:
                # no person was found in People with the email above, so
                # we include it
                printdbg("Person not found. Inserted with email %s " % (email))
                dp = DBPeople(email)
                self.store.add(dp)
                self.store.commit()
                return dp.id

    def _get_sql_drop(self):
        """
        Abstract method for inserting extra data related to a change
        """
        raise NotImplementedError

    def _get_sql_create(self):
        """
        Abstract method for inserting extra data related to a change
        """
        raise NotImplementedError

    def _get_tracker_id(self, issue_id):
        """
        Returns tracker id from issues
        """
        result = self.store.find(DBIssue.tracker_id,
                                 DBIssue.id == issue_id).one()
        return result

    def _copy_issue_ext(self, aux, db_ilog):
        """
        Abstract method for inserting extra data related to a change
        """
        raise NotImplementedError

    # TODO: reuse _copy_standard_values
    def _copy_issue(self, db_ilog):
        """
        This method returns a copy of the DB*Log object
        """
        aux = self._get_dbissues_object(db_ilog.issue, db_ilog.tracker_id)
        aux.issue_id = db_ilog.issue_id
        aux.change_id = db_ilog.change_id
        aux.changed_by = db_ilog.changed_by
        aux.type = db_ilog.type
        aux.summary = db_ilog.summary
        aux.description = db_ilog.description
        aux.status = db_ilog.status
        aux.resolution = db_ilog.resolution
        aux.priority = db_ilog.priority
        aux.submitted_by = db_ilog.submitted_by
        aux.date = db_ilog.date
        aux.assigned_to = db_ilog.assigned_to
        aux = self._copy_issue_ext(aux, db_ilog)
        return aux

    def _assign_values(self, db_ilog, field, value):
        """
        Abstract method for inserting extra data related to a change
        """
        raise NotImplementedError

    def _build_initial_state(self, db_ilog):
        """
        This method gets the first change of every field in
        order to reconstruct the initial state of the bug
        """
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes " +
                                    "WHERE issue_id=%s" % (db_ilog.issue_id))

        for f in fields:
            values = self.store.execute(
                "SELECT old_value FROM changes WHERE issue_id=%s AND \
                field=\"%s\" ORDER BY changed_on LIMIT 1"
                % (db_ilog.issue_id, f[0]))
            for v in values:
                db_ilog = self._assign_values(db_ilog, f[0], v[0])
        return db_ilog
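
    # Illustrative note (not from the original source): given changes rows
    #
    #   issue_id | field  | old_value | new_value | changed_on
    #   7        | status | NEW       | ASSIGNED  | 2011-01-02
    #   7        | status | ASSIGNED  | RESOLVED  | 2011-03-05
    #
    # the query above picks the oldest old_value per field, so the
    # reconstructed initial state of issue 7 has status == "NEW".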

    def _get_dbissues_object(self, issue_name, tracker_id):
        """
        Abstract method for inserting extra data related to a change
        """
        raise NotImplementedError

    def _copy_standard_values(self, issue, issue_log):
        """
        Copy the standard values from the issue object to the issue_log object
        """
        issue_log.issue_id = issue.id
        issue_log.type = issue.type
        issue_log.summary = issue.summary
        issue_log.description = issue.description
        issue_log.status = issue.status
        issue_log.resolution = issue.resolution
        issue_log.priority = issue.priority
        issue_log.submitted_by = issue.submitted_by
        issue_log.date = issue.submitted_on
        issue_log.assigned_to = issue.assigned_to
        return issue_log

    def _print_final_msg(self):
        """
        Abstract method for inserting extra data related to a change
        """
        raise NotImplementedError

    def _get_changes(self, issue_id):
        aux = self.store.execute("SELECT id, field, new_value, changed_by, \
        changed_on FROM changes where issue_id=%s" % (issue_id))
        return aux

    def _post_history(self, db_ilog, final_status):
        """
        Abstract method for inserting extra data using the full issue history
        """
        pass

    def run(self):
        ndone = 0
        issues = self.store.find(DBIssue)
        total = issues.count()
        print("[IssuesLog] Total issues to analyze: %d" % total)
        for i in issues:
            if ndone % 1000 == 0:
                print("[IssuesLog] Analyzed %d/%d" % (ndone, total))
            db_ilog = self._get_dbissues_object(i.issue, i.tracker_id)
            db_ilog = self._copy_standard_values(i, db_ilog)
            final_status = db_ilog.status

            db_ilog = self._build_initial_state(db_ilog)

            self.store.add(db_ilog)
            self.store.flush()

            # the code below gets all the changes and inserts a row per change
            changes = self._get_changes(db_ilog.issue_id)

            for ch in changes:
                change_id = ch[0]
                field = ch[1]
                new_value = ch[2]
                changed_by = ch[3]
                date = ch[4]
                # we need a new object to be inserted in the database
                db_ilog = self._copy_issue(db_ilog)
                db_ilog.date = date
                db_ilog.change_id = change_id
                db_ilog.changed_by = changed_by
                db_ilog = self._assign_values(db_ilog, field, new_value)

                try:
                    self.store.add(db_ilog)
                    self.store.flush()
                except Exception:
                    # self.store.rollback() # is this useful in this context?
                    traceback.print_exc()
            self._post_history(db_ilog, final_status)
            self.store.commit()
            ndone += 1
        self._print_final_msg()
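
IssuesLog is abstract: each tracker backend supplies the SQL and the field mapping. As a hedged sketch, a minimal concrete subclass might look like the following; the table, model, and field names are hypothetical, not taken from the Bicho sources.

# Hypothetical minimal backend for IssuesLog; all names below are illustrative.
class SimpleIssuesLog(IssuesLog):

    def _get_sql_create(self):
        return ("CREATE TABLE issues_log_simple ("
                "id INTEGER NOT NULL AUTO_INCREMENT, "
                "issue_id INTEGER NOT NULL, "
                "change_id INTEGER, "
                "status VARCHAR(32), "
                "date DATETIME, "
                "PRIMARY KEY(id))")

    def _get_sql_drop(self):
        return "DROP TABLE IF EXISTS issues_log_simple"

    def _get_dbissues_object(self, issue_name, tracker_id):
        # DBSimpleIssuesLog would be a Storm model mapped onto the table above
        return DBSimpleIssuesLog(issue_name, tracker_id)

    def _assign_values(self, db_ilog, field, value):
        # map one change record onto the log object; unknown fields are ignored
        if field == 'Status':
            db_ilog.status = value
        return db_ilog

    def _copy_issue_ext(self, aux, db_ilog):
        # no backend-specific columns in this sketch
        return aux

    def _post_history(self, db_ilog, final_status):
        pass

    def _print_final_msg(self):
        print("[IssuesLog] Dump completed")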
Code Example #55
File: migration.py  Project: comradekingu/GlobaLeaks
def perform_version_update(version):
    """
    @param version:
    @return:
    """
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        GLSettings.print_msg("Migrations from DB version lower than %d are no more supported!" % FIRST_DATABASE_VERSION_SUPPORTED)
        GLSettings.print_msg("If you can't create your Node from scratch, contact us asking for support.")
        quit()

    tmpdir = os.path.abspath(os.path.join(GLSettings.db_path, 'tmp'))
    orig_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % version))
    final_db_file = os.path.abspath(os.path.join(GLSettings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))

    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy2(orig_db_file, tmpdir)

    old_db_file = None
    new_db_file = None

    try:
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % version))
            new_db_file = os.path.abspath(os.path.join(tmpdir, 'glbackend-%d.db' % (version + 1)))

            GLSettings.db_file = new_db_file
            GLSettings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            GLSettings.print_msg("Updating DB from version %d to version %d" % (version, version + 1))

            store_old = Store(create_database('sqlite:' + old_db_file))
            store_new = Store(create_database('sqlite:' + new_db_file))

            # Here the migration script for this version step is instantiated
            MigrationModule = importlib.import_module("globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(migration_mapping, version, store_old, store_new)

            GLSettings.print_msg("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    GLSettings.print_msg("Failure while executing migration prologue: %s" % exception)
                    raise

                for model_name, _ in migration_mapping.iteritems():
                    if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit after every table migration so that a
                            # failure can be pinned to the exact table.
                            migration_script.commit()
                        except Exception as exception:
                            GLSettings.print_msg("Failure while migrating table %s: %s " % (model_name, exception))
                            raise
                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    GLSettings.print_msg("Failure while executing migration epilogue: %s " % exception)
                    raise

            finally:
                # the stores must always be closed before leaving the application
                # in order to avoid leaking journal files.
                migration_script.close()

            GLSettings.print_msg("Migration stats:")

            # open a new store in order to verify the integrity of the generated file
            store_verify = Store(create_database('sqlite:' + new_db_file))

            for model_name, _ in migration_mapping.iteritems():
                if model_name == 'ApplicationData':
                    continue

                if migration_script.model_from[model_name] is not None and migration_script.model_to[model_name] is not None:
                    count = store_verify.find(migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" % \
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            GLSettings.print_msg(" * %s table migrated (entries count changed from %d to %d)" % \
                                                 (model_name, migration_script.entries_count[model_name], count))
                    else:
                        GLSettings.print_msg(" * %s table migrated (%d entry(s))" % \
                                             (model_name, migration_script.entries_count[model_name]))

            version += 1

            store_verify.close()

        store_appdata = Store(create_database('sqlite:' + new_db_file))
        db_update_appdata(store_appdata)
        db_fix_fields_attrs(store_appdata)
        store_appdata.commit()
        store_appdata.close()

    except Exception as exception:
        print exception
        # re-raise, preserving the original traceback
        raise

    else:
        # on success, first copy the migrated db into place, then delete the original db file
        shutil.copy(new_db_file, final_db_file)
        os.remove(orig_db_file)

    finally:
        # always cleanup the temporary directory used for the migration
        shutil.rmtree(tmpdir, True)
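
perform_version_update expects one module per version step under globaleaks.db.migrations, each exposing a MigrationScript class whose prologue/migrate_*/epilogue/commit/close hooks are driven by the loop above. A hedged sketch of what such a module might contain follows; the hook names mirror the calls above, but the base class and bodies are illustrative, not the real update_N code.

# globaleaks/db/migrations/update_N.py -- illustrative sketch of one step
class MigrationScript(MigrationBase):  # the base-class name is an assumption
    def prologue(self):
        # one-off work that must happen before any table is copied
        pass

    def migrate_User(self):
        # override the generic copy only for the tables whose schema
        # changed in this step; everything else uses the base-class default
        pass

    def epilogue(self):
        # data fixups that require all tables to be migrated already
        pass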