Example #1
class BioDBBase(object):
    def __init__(self, db_name):
        self.db_name = db_name
        self.__init_database()

    def __init_database(self):
        """
        Creates the SQLite database instance and checks if the database
        exists in biodb.
        """
        database = create_database("sqlite:%s" % biodb_sql_db_path)
        print "Created storm database from %s." % biodb_sql_db_path
        self.store = Store(database)

    def init_table(self):
        self.biodb_table = "biodb_" + self.db_name
        self.hier_table = "hier_" + self.db_name
        self.lineage_table = "lineage_" + self.db_name

        BioDB.__storm_table__ = self.biodb_table
        Hierarchy.__storm_table__ = self.hier_table
        Lineage.__storm_table__ = self.lineage_table
        # check if the db_name's tables already exist in the database
        table_list = [table[0] for table in
                      self.store.execute('select tbl_name from SQLITE_MASTER')]

        return 0 if self.biodb_table not in table_list else 1
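A minimal usage sketch, assuming the module-level biodb_sql_db_path and the
BioDB, Hierarchy and Lineage model classes are defined elsewhere in the module:

# Hypothetical usage of BioDBBase.
db = BioDBBase("taxonomy")
if not db.init_table():
    # init_table() returns 0 when the tables do not exist yet
    print "Tables for %s are missing." % db.db_name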
Example #2
    def _checkValidDatabase(self, storage):
        '''Checks the Store to make sure it has a valid database'''

        store = Store(storage)
        for table in SCHEMA.iterkeys():
            result = store.execute('SELECT * FROM `%s`' % table.lower())
            self.assertEqual(result.get_all(), [])
        return True
Example #4
def createTable(model, transactor, database):
    """
    Create the table for the specified model.
    Specification of a transactor and database is useful in unittesting.
    """
    if not transactor:
        from oonib.db import transactor
    if not database:
        from oonib.db import database
    store = Store(database)
    create_query = generateCreateQuery(model)
    try:
        store.execute(create_query)
    # XXX trap the specific error that is raised when the table exists
    except StormError, e:
        print "Failed to create table!"
        print e
        store.close()
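A hedged usage sketch; SomeModel is a placeholder for any Storm model class,
and passing None lets the oonib.db defaults kick in:

# Hypothetical call using the default transactor and database.
createTable(SomeModel, None, None)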
Example #6
def getStore(url, create=False):
    # register new Storm scheme
    register_scheme("sqlitefk", ForeignKeysSQLite)

    d = create_database(url)
    s = Store(d)

    if create:
        schema_path = os.path.join(os.path.dirname(__file__), "schema.sql")
        schema = open(schema_path, "r").read().split("\n\n")
        for cmd in schema:
            s.execute(cmd)

        version = Meta()
        version.key = u"created"
        s.add(version)

        s.commit()

    return s
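For example (a sketch; it assumes a schema.sql next to the module whose
statements are separated by blank lines):

# Hypothetical usage: open a foreign-key-aware SQLite store,
# creating the schema on first run.
store = getStore("sqlitefk:///tmp/example.db", create=True)
# Meta is the model used above; its key column records the creation stamp.
created = store.find(Meta, Meta.key == u"created").one()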
Example #7
    def test_create(self):
        """
        L{Schema.create} can be used to create the tables of a L{Store}.
        """
        self.assertRaises(StormError, self.store.execute,
                          "SELECT * FROM person")
        self.schema.create(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])
        # By default changes are committed
        store2 = Store(self.database)
        self.assertEquals(list(store2.execute("SELECT * FROM person")), [])
Example #8
def createTable(model, transactor, database):
    """
    Create the table for the specified model.
    It will default to using globaleaks.db transactor and database if none is
    specified.
    Specification of a transactor and database is useful in unittesting.
    """
    if not transactor:
        from globaleaks.db import transactor
    if not database:
        from globaleaks.db import database
    store = Store(database)
    create_query = generateCreateQuery(model)

    try:
        store.execute(create_query)
    # XXX trap the specific error that is raised when the table exists
    # seem to be OperationalError raised, but not a specific error exists.
    except StormError, e:
        print "Failed to create table!", e
        store.close()
Example #9
    def test_schema_uri(self):
        """
        It's possible to specify an alternate URI for applying the schema
        and cleaning up tables after a test.
        """
        schema_uri = "sqlite:///%s" % self.makeFile()
        self.databases[0]["schema-uri"] = schema_uri
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        schema_store = Store(create_database(schema_uri))

        # The schema was applied using the alternate schema URI
        statement = "SELECT name FROM sqlite_master WHERE name='patch'"
        self.assertEqual([], list(store.execute(statement)))
        self.assertEqual([("patch",)], list(schema_store.execute(statement)))

        # The cleanup is performed with the alternate schema URI
        store.commit()
        schema_store.execute("INSERT INTO test (foo) VALUES ('data')")
        schema_store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(schema_store.execute("SELECT * FROM test")))
Example #11
def perform_storm_benchmark(database, conn_str, args, benchmark_result):
    if database == 'sqlite':
        if conn_str == ':memory:':
            conn_str = 'sqlite:'
    db = create_database(conn_str)
    store = Store(db)
    if database == 'sqlite':
        store.execute("""
            CREATE TABLE person
            (id INTEGER PRIMARY KEY, name VARCHAR)
        """)
        store.execute("""
            CREATE TABLE address
            (id INTEGER PRIMARY KEY, address VARCHAR, person_id INTEGER,
            FOREIGN KEY(person_id) REFERENCES person(id))
        """)
    elif database == 'mysql':
        store.execute("""
            CREATE TABLE person
            (id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
             name VARCHAR(256))
        """)
        store.execute("""
            CREATE TABLE address
            (id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
             address VARCHAR(256),
             person_id INT,
             FOREIGN KEY(person_id) REFERENCES person(id))
        """)
    __builtin__.__dict__.update(locals())
    test_data = test_data_from_args(args)
    if 'Storm' not in benchmark_result:
        benchmark_result['Storm'] = dict()
    if database not in benchmark_result['Storm']:
        benchmark_result['Storm'][database] = dict()
    test_aspects = ['insert', 'read', 'update', 'delete']
    timeit_funcs = [
        "_{0}_{1}_data(test_data, store)".format(
            'storm', test_aspect
        )
        for test_aspect in test_aspects
    ]
    for index, tf in enumerate(timeit_funcs):
        rst = timeit.timeit(tf, number=args.num_repeats)
        benchmark_result['Storm'][database][test_aspects[index]] = rst
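A sketch of how this benchmark might be driven; the args fields and the
_storm_*_data helpers (which test_data_from_args and the timeit strings rely
on) live elsewhere in the benchmark module, so the names here are assumptions:

# Hypothetical driver code.
import argparse
args = argparse.Namespace(num_repeats=3, num_records=1000)  # hypothetical fields
results = {}
perform_storm_benchmark('sqlite', ':memory:', args, results)
print results['Storm']['sqlite']['insert']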
Example #13
    def _create(self):
        '''Create a new entertainer database

        Reads the current database schema dictionary, and creates the sqlite
        database based on that schema
        '''

        store = Store(self._db)
        store.execute("""
        CREATE TABLE `entertainer_data` (
            name VARCHAR PRIMARY KEY,
            value VARCHAR);""")
        store.execute(
            "INSERT INTO `entertainer_data` VALUES ('version', '0.2a');")

        for query in SCHEMA.itervalues():
            store.execute(query, noresult=True)
            store.commit()
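For context, the SCHEMA dictionary used here (and in _checkValidDatabase
above) maps table names to their CREATE statements; a hypothetical sketch of
its shape, with invented tables:

# Illustrative only; the real Entertainer schema defines more tables.
SCHEMA = {
    'VideoMetadata': "CREATE TABLE `videometadata` "
                     "(filename VARCHAR PRIMARY KEY, title VARCHAR);",
    'MusicTracks': "CREATE TABLE `musictracks` "
                   "(filename VARCHAR PRIMARY KEY, title VARCHAR);",
}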
Example #15
class ZStormResourceManagerTest(TestHelper):

    def is_supported(self):
        return has_transaction and has_zope_component and has_testresources

    def setUp(self):
        super(ZStormResourceManagerTest, self).setUp()
        self._package_dir = self.makeDir()
        sys.path.append(self._package_dir)
        patch_dir = os.path.join(self._package_dir, "patch_package")
        os.mkdir(patch_dir)
        self.makeFile(path=os.path.join(patch_dir, "__init__.py"), content="")
        self.makeFile(path=os.path.join(patch_dir, "patch_1.py"),
                      content=PATCH)
        import patch_package
        create = ["CREATE TABLE test (foo TEXT UNIQUE, bar INT)"]
        drop = ["DROP TABLE test"]
        delete = ["DELETE FROM test"]
        uri = "sqlite:///%s" % self.makeFile()
        schema = ZSchema(create, drop, delete, patch_package)
        self.databases = [{"name": "test", "uri": uri, "schema": schema}]
        self.resource = ZStormResourceManager(self.databases)
        self.store = Store(create_database(uri))

    def tearDown(self):
        del sys.modules["patch_package"]
        sys.path.remove(self._package_dir)
        if "patch_1" in sys.modules:
            del sys.modules["patch_1"]
        super(ZStormResourceManagerTest, self).tearDown()

    def test_make(self):
        """
        L{ZStormResourceManager.make} returns a L{ZStorm} resource that can be
        used to get the registered L{Store}s.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo, bar FROM test")))

    def test_make_upgrade(self):
        """
        L{ZStormResourceManager.make} upgrades the schema if needed.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT bar FROM test")))

    def test_make_delete(self):
        """
        L{ZStormResourceManager.make} deletes the data from all tables to make
        sure that tests run against a clean database.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.execute("INSERT INTO test (foo) VALUES ('data')")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo FROM test")))

    def test_make_zstorm_overwritten(self):
        """
        L{ZStormResourceManager.make} registers its own ZStorm again if a test
        has registered a new ZStorm utility overwriting the resource one.
        """
        zstorm = self.resource.make([])
        provideUtility(ZStorm())
        self.resource.make([])
        self.assertIs(zstorm, getUtility(IZStorm))

    def test_clean_flush(self):
        """
        L{ZStormResourceManager.clean} tries to flush the stores to make sure
        that they are all in a consistent state.
        """
        class Test(object):
            __storm_table__ = "test"
            foo = Unicode()
            bar = Int(primary=True)

            def __init__(self, foo, bar):
                self.foo = foo
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(u"data", 1))
        store.add(Test(u"data", 2))
        self.assertRaises(IntegrityError, self.resource.clean, zstorm)

    def test_clean_delete(self):
        """
        L{ZStormResourceManager.clean} cleans the database tables from the data
        created by the tests.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))

    def test_clean_with_force_delete(self):
        """
        If L{ZStormResourceManager.force_delete} is C{True}, L{Schema.delete}
        is always invoked upon test cleanup.
        """
        zstorm = self.resource.make([])
        self.store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        self.store.commit()
        self.resource.force_delete = True
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))

    def test_wb_clean_clears_alive_cache_before_abort(self):
        """
        L{ZStormResourceManager.clean} clears the alive cache before
        aborting the transaction.
        """
        class Test(object):
            __storm_table__ = "test"
            bar = Int(primary=True)

            def __init__(self, bar):
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(1))
        store.add(Test(2))
        real_invalidate = store.invalidate

        def invalidate_proxy():
            self.assertEqual(0, len(store._alive.values()))
            real_invalidate()
        store.invalidate = invalidate_proxy

        self.resource.clean(zstorm)

    def test_schema_uri(self):
        """
        It's possible to specify an alternate URI for applying the schema
        and cleaning up tables after a test.
        """
        schema_uri = "sqlite:///%s" % self.makeFile()
        self.databases[0]["schema-uri"] = schema_uri
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        schema_store = Store(create_database(schema_uri))

        # The schema was applied using the alternate schema URI
        statement = "SELECT name FROM sqlite_master WHERE name='patch'"
        self.assertEqual([], list(store.execute(statement)))
        self.assertEqual([("patch",)], list(schema_store.execute(statement)))

        # The cleanup is performed with the alternate schema URI
        store.commit()
        schema_store.execute("INSERT INTO test (foo) VALUES ('data')")
        schema_store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(schema_store.execute("SELECT * FROM test")))

    def test_deprecated_database_format(self):
        """
        The old deprecated format of the 'database' constructor parameter is
        still supported.
        """
        import patch_package
        uri = "sqlite:///%s" % self.makeFile()
        schema = ZSchema([], [], [], patch_package)
        resource = ZStormResourceManager({"test": (uri, schema)})
        zstorm = resource.make([])
        store = zstorm.get("test")
        self.assertIsNot(None, store)
Example #16
class SchemaTest(MockerTestCase):
    def setUp(self):
        super(SchemaTest, self).setUp()
        self.database = create_database("sqlite:///%s" % self.makeFile())
        self.store = Store(self.database)

        self._package_dirs = set()
        self._package_names = set()
        self.package = self.create_package(self.makeDir(), "patch_package")
        import patch_package

        creates = ["CREATE TABLE person (id INTEGER, name TEXT)"]
        drops = ["DROP TABLE person"]
        deletes = ["DELETE FROM person"]

        self.schema = Schema(creates, drops, deletes, patch_package)

    def tearDown(self):
        for package_dir in self._package_dirs:
            sys.path.remove(package_dir)

        for name in list(sys.modules):
            if name in self._package_names:
                del sys.modules[name]
            elif filter(None, [name.startswith("%s." % x)
                               for x in self._package_names]):
                del sys.modules[name]

        super(SchemaTest, self).tearDown()

    def create_package(self, base_dir, name, init_module=None):
        """Create a Python package.

        Packages created using this method will be removed from L{sys.path}
        and L{sys.modules} during L{tearDown}.

        @param base_dir: The directory in which to create the new package.
        @param name: The name of the package.
        @param init_module: Optionally, the text to include in the __init__.py
            file.
        @return: A L{Package} instance that can be used to create modules.
        """
        package_dir = os.path.join(base_dir, name)
        self._package_names.add(name)
        os.makedirs(package_dir)

        file = open(os.path.join(package_dir, "__init__.py"), "w")
        if init_module:
            file.write(init_module)
        file.close()
        sys.path.append(base_dir)
        self._package_dirs.add(base_dir)

        return Package(package_dir, name)

    def test_create(self):
        """
        L{Schema.create} can be used to create the tables of a L{Store}.
        """
        self.assertRaises(StormError, self.store.execute,
                          "SELECT * FROM person")
        self.schema.create(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])

    def test_create_with_autocommit_off(self):
        """
        L{Schema.autocommit} can be used to turn automatic commits off.
        """
        self.schema.autocommit(False)
        self.schema.create(self.store)
        self.store.rollback()
        self.assertRaises(StormError, self.store.execute,
                          "SELECT * FROM patch")

    def test_drop(self):
        """
        L{Schema.drop} can be used to drop the tables of a L{Store}.
        """
        self.schema.create(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])
        self.schema.drop(self.store)
        self.assertRaises(StormError, self.store.execute,
                          "SELECT * FROM person")

    def test_delete(self):
        """
        L{Schema.delete} can be used to clear the tables of a L{Store}.
        """
        self.schema.create(self.store)
        self.store.execute("INSERT INTO person (id, name) VALUES (1, 'Jane')")
        self.assertEquals(list(self.store.execute("SELECT * FROM person")),
                          [(1, u"Jane")])
        self.schema.delete(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])

    def test_upgrade_creates_schema(self):
        """
        L{Schema.upgrade} creates a schema from scratch if none exists, and is
        effectively equivalent to L{Schema.create} in such case.
        """
        self.assertRaises(StormError, self.store.execute,
                          "SELECT * FROM person")
        self.schema.upgrade(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])

    def test_upgrade_marks_patches_applied(self):
        """
        L{Schema.upgrade} updates the patch table after applying the needed
        patches.
        """
        contents = """
def apply(store):
    store.execute('ALTER TABLE person ADD COLUMN phone TEXT')
"""
        self.package.create_module("patch_1.py", contents)
        statement = "SELECT * FROM patch"
        self.assertRaises(StormError, self.store.execute, statement)
        self.schema.upgrade(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM patch")),
                          [(1, )])

    def test_upgrade_applies_patches(self):
        """
        L{Schema.upgrade} executes the needed patches, that typically modify
        the existing schema.
        """
        self.schema.create(self.store)
        contents = """
def apply(store):
    store.execute('ALTER TABLE person ADD COLUMN phone TEXT')
"""
        self.package.create_module("patch_1.py", contents)
        self.schema.upgrade(self.store)
        self.store.execute(
            "INSERT INTO person (id, name, phone) VALUES (1, 'Jane', '123')")
        self.assertEquals(list(self.store.execute("SELECT * FROM person")),
                          [(1, u"Jane", u"123")])
Example #17
class IssuesLog:

    def __init__(self):
        self._connect()
        # it is not incremental so we first drop the table
        self._drop_db()
        self._create_db()

    def _connect(self):
        opts = Config()

        self.database = create_database('mysql://' + opts.db_user_out + ':'
                                        + opts.db_password_out + '@'
                                        + opts.db_hostname_out + ':'
                                        + opts.db_port_out + '/'
                                        + opts.db_database_out)
        self.store = Store(self.database)

    def _create_db(self):
        self.store.execute(self._get_sql_create())

    def _drop_db(self):
        self.store.execute(self._get_sql_drop())

    def _get_people_id(self, email):
        """
        Gets the id of a user
        """
        try:
            p = self.store.find(DBPeople, DBPeople.email == email).one()
            return p.id
        except (AttributeError, NotOneError):
            p = self.store.find(DBPeople, DBPeople.user_id == email).one()
            try:
                return p.id
            except AttributeError:
                # no person was found in People with the email above, so
                # we include it
                printdbg("Person not found. Inserted with email %s " % (email))
                dp = DBPeople(email)
                self.store.add(dp)
                self.store.commit()
                return dp.id

    def _get_sql_drop(self):
        """
        Abstract method: returns the SQL that drops the log table
        """
        raise NotImplementedError

    def _get_sql_create(self):
        """
        Abstract method: returns the SQL that creates the log table
        """
        raise NotImplementedError

    def _get_tracker_id(self, issue_id):
        """
        Returns tracker id from issues
        """
        result = self.store.find(DBIssue.tracker_id,
                                 DBIssue.id == issue_id).one()
        return result

    def _copy_issue_ext(self, aux, db_ilog):
        """
        Abstract method: copies backend-specific fields of a change
        """
        raise NotImplementedError

    # TODO: reuse _copy_standard_values
    def _copy_issue(self, db_ilog):
        """
        This method returns a copy of the DB*Log object
        """
        aux = self._get_dbissues_object(db_ilog.issue, db_ilog.tracker_id)
        aux.issue_id = db_ilog.issue_id
        aux.change_id = db_ilog.change_id
        aux.changed_by = db_ilog.changed_by
        aux.type = db_ilog.type
        aux.summary = db_ilog.summary
        aux.description = db_ilog.description
        aux.status = db_ilog.status
        aux.resolution = db_ilog.resolution
        aux.priority = db_ilog.priority
        aux.submitted_by = db_ilog.submitted_by
        aux.date = db_ilog.date
        aux.assigned_to = db_ilog.assigned_to
        aux = self._copy_issue_ext(aux, db_ilog)
        return aux

    def _assign_values(self, db_ilog, field, value):
        """
        Abstract method: assigns a field/value pair to the log object
        """
        raise NotImplementedError

    def _build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in
        order to get the initial state of the bug
        """
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes " +
                                    "WHERE issue_id=%s" % (db_ilog.issue_id))

        for f in fields:
            values = self.store.execute(
                "SELECT old_value FROM changes WHERE issue_id=%s AND \
                field=\"%s\" ORDER BY changed_on LIMIT 1"
                % (db_ilog.issue_id, f[0]))
            for v in values:
                db_ilog = self._assign_values(db_ilog, f[0], v[0])
        return db_ilog

    def _get_dbissues_object(self, issue_name, tracker_id):
        """
        Abstract method: returns a new DB*Log object for the given issue and tracker
        """
        raise NotImplementedError

    def _copy_standard_values(self, issue, issue_log):
        """
        Copy the standard values from the issue object to the issue_log object
        """
        issue_log.issue_id = issue.id
        issue_log.type = issue.type
        issue_log.summary = issue.summary
        issue_log.description = issue.description
        issue_log.status = issue.status
        issue_log.resolution = issue.resolution
        issue_log.priority = issue.priority
        issue_log.submitted_by = issue.submitted_by
        issue_log.date = issue.submitted_on
        issue_log.assigned_to = issue.assigned_to
        return issue_log

    def _print_final_msg(self):
        """
        Abstract method: prints a final summary message
        """
        raise NotImplementedError

    def _get_changes(self, issue_id):
        aux = self.store.execute("SELECT id, field, new_value, changed_by, \
        changed_on FROM changes where issue_id=%s" % (issue_id))
        return aux

    def _post_history(self, db_ilog, final_status):
        """
        Abstract method for inserting extra data using full issue history
        """
        pass

    def run(self):
        ndone = 0
        issues = self.store.find(DBIssue)
        total = issues.count()
        print ("[IssuesLog] Total issues to analyze: " + str(total))
        for i in issues:
            if (ndone % 1000 == 0):
                print ("[IssuesLog] Analyzed " + str(ndone) + "/" + str(total))
            db_ilog = self._get_dbissues_object(i.issue, i.tracker_id)
            db_ilog = self._copy_standard_values(i, db_ilog)
            final_status = db_ilog.status

            db_ilog = self._build_initial_state(db_ilog)

            self.store.add(db_ilog)
            self.store.flush()

            # the code below gets all the changes and insert a row per change
            changes = self._get_changes(db_ilog.issue_id)

            for ch in changes:
                change_id = ch[0]
                field = ch[1]
                new_value = ch[2]
                changed_by = ch[3]
                date = ch[4]
                # we need a new object to be inserted in the database
                db_ilog = self._copy_issue(db_ilog)
                db_ilog.date = date
                db_ilog.change_id = change_id
                db_ilog.changed_by = changed_by
                db_ilog = self._assign_values(db_ilog, field, new_value)

                try:
                    self.store.add(db_ilog)
                    self.store.flush()
                except:
                    # self.store.rollback() # is this useful in this context?
                    traceback.print_exc()
            self._post_history(db_ilog, final_status)
            self.store.commit()
            ndone += 1
        self._print_final_msg()
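Concrete backends subclass IssuesLog and fill in the abstract hooks; a
hypothetical sketch for illustration (the table layout and the
DBExampleIssuesLog model are invented, not taken from a real backend):

# Hypothetical concrete subclass; schema and model names are illustrative.
class ExampleIssuesLog(IssuesLog):
    def _get_sql_create(self):
        return ("CREATE TABLE IF NOT EXISTS issues_log_example "
                "(id INTEGER PRIMARY KEY AUTO_INCREMENT, "
                "issue_id INTEGER, status VARCHAR(32), date DATETIME)")

    def _get_sql_drop(self):
        return "DROP TABLE IF EXISTS issues_log_example"

    def _get_dbissues_object(self, issue_name, tracker_id):
        return DBExampleIssuesLog(issue_name, tracker_id)  # hypothetical model

    def _assign_values(self, db_ilog, field, value):
        if field == 'Status':
            db_ilog.status = value
        return db_ilog

    def _copy_issue_ext(self, aux, db_ilog):
        return aux

    def _post_history(self, db_ilog, final_status):
        pass

    def _print_final_msg(self):
        print "[IssuesLog] Example backend done."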
Example #18
class SchemaTest(MakePackage, MakePath):
    """Test the Storm Schema Create, Delete, and Drop and upgrade"""
    def setUp(self):
        super(SchemaTest, self).setUp()
        sqlite_path = self.make_path("")
        self.database = create_database("sqlite:///%s" % sqlite_path)
        self.store = Store(self.database)
        self.patch_table = "my_patch_table"

        self.package = self.create_package(self.make_path(), "patch_package")
        # patch_package is created during the tests and is not around during
        # lint checks, so we'll disable the error
        import patch_package

        creates = ["CREATE TABLE person (id INTEGER, name TEXT)"]
        drops = ["DROP TABLE person"]
        deletes = ["DELETE FROM person"]

        self.schema = Schema(creates, drops, deletes, patch_package,
                             self.patch_table)

    def test_create(self):
        """Create a Schema"""
        self.assertRaises(StormError,
                          self.store.execute, "SELECT * FROM person")
        self.schema.create(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])

    def test_drop(self):
        """Drop a Schema"""
        self.schema.create(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])
        self.schema.drop(self.store)
        self.assertRaises(StormError,
                          self.store.execute, "SELECT * FROM person")

    def test_delete(self):
        """Delete a Schema"""
        self.schema.create(self.store)
        self.store.execute("INSERT INTO person (id, name) VALUES (1, 'Jane')")
        self.assertEquals(list(self.store.execute("SELECT * FROM person")),
                          [(1, u"Jane")])
        self.schema.delete(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])

    def test_upgrade_creates_schema(self):
        """Upgrade a Schema, aka apply all patches"""
        self.assertRaises(StormError,
                          self.store.execute, "SELECT * FROM person")
        self.schema.upgrade(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])

    def test_upgrade_marks_patches_applied(self):
        """Test that an upgrade updates the patch table"""
        contents = """
def apply(store):
    store.execute('ALTER TABLE person ADD COLUMN phone TEXT')
"""
        self.package.create_module("patch_1.py", contents)
        self.assertRaises(StormError, self.store.execute,
                          "SELECT * FROM %s" % self.patch_table)
        self.schema.upgrade(self.store)
        self.assertEquals(
            list(self.store.execute("SELECT * FROM %s" % self.patch_table)),
            [(1,)])

    def test_upgrade_applies_patches(self):
        """Test that an upgrade actually applies the patches"""
        self.schema.create(self.store)
        contents = """
def apply(store):
    store.execute('ALTER TABLE person ADD COLUMN phone TEXT')
"""
        self.package.create_module("patch_1.py", contents)
        self.schema.upgrade(self.store)
        self.store.execute(
            "INSERT INTO person (id, name, phone) VALUES (1, 'Jane', '123')")
        self.assertEquals(list(self.store.execute("SELECT * FROM person")),
                          [(1, u"Jane", u"123")])
Example #19
class ZStormResourceManagerTest(TestHelper):

    def is_supported(self):
        return has_transaction and has_zope_component and has_testresources

    def setUp(self):
        super(ZStormResourceManagerTest, self).setUp()
        package_dir = self.makeDir()
        sys.path.append(package_dir)
        self.patch_dir = os.path.join(package_dir, "patch_package")
        os.mkdir(self.patch_dir)
        self.makeFile(path=os.path.join(self.patch_dir, "__init__.py"),
                      content="")
        self.makeFile(path=os.path.join(self.patch_dir, "patch_1.py"),
                      content=PATCH)
        import patch_package
        create = ["CREATE TABLE test (foo TEXT UNIQUE, bar INT)"]
        drop = ["DROP TABLE test"]
        delete = ["DELETE FROM test"]
        uri = "sqlite:///%s" % self.makeFile()
        schema = ZSchema(create, drop, delete, patch_package)
        self.databases = [{"name": "test", "uri": uri, "schema": schema}]
        self.resource = ZStormResourceManager(self.databases)
        self.store = Store(create_database(uri))

    def tearDown(self):
        global_zstorm._reset()
        del sys.modules["patch_package"]
        if "patch_1" in sys.modules:
            del sys.modules["patch_1"]
        super(ZStormResourceManagerTest, self).tearDown()

    def test_make(self):
        """
        L{ZStormResourceManager.make} returns a L{ZStorm} resource that can be
        used to get the registered L{Store}s.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo, bar FROM test")))

    def test_make_lazy(self):
        """
        L{ZStormResourceManager.make} does not create all stores upfront, but
        only when they're actually used, likewise L{ZStorm.get}.
        """
        zstorm = self.resource.make([])
        self.assertEqual([], list(zstorm.iterstores()))
        store = zstorm.get("test")
        self.assertEqual([("test", store)], list(zstorm.iterstores()))

    def test_make_upgrade(self):
        """
        L{ZStormResourceManager.make} upgrades the schema if needed.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT bar FROM test")))

    def test_make_upgrade_unknown_patch(self):
        """
        L{ZStormResourceManager.make} resets the schema if an unknown patch
        is found
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("INSERT INTO patch VALUES (2)")
        self.store.execute("CREATE TABLE test (foo TEXT, egg BOOL)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo, bar FROM test")))
        self.assertEqual([(1,)],
                         list(store.execute("SELECT version FROM patch")))

    def test_make_delete(self):
        """
        L{ZStormResourceManager.make} deletes the data from all tables to make
        sure that tests run against a clean database.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.execute("INSERT INTO test (foo) VALUES ('data')")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo FROM test")))

    def test_make_commits_transaction_once(self):
        """
        L{ZStormResourceManager.make} commits schema changes only once
        across all stores, after all patch and delete statements have
        been executed.
        """
        database2 = {"name": "test2",
                     "uri": "sqlite:///%s" % self.makeFile(),
                     "schema": self.databases[0]["schema"]}
        self.databases.append(database2)
        other_store = Store(create_database(database2["uri"]))
        for store in [self.store, other_store]:
            store.execute("CREATE TABLE patch "
                          "(version INTEGER NOT NULL PRIMARY KEY)")
            store.execute("CREATE TABLE test (foo TEXT)")
            store.execute("INSERT INTO test (foo) VALUES ('data')")
            store.commit()

        with CaptureTracer() as tracer:
            zstorm = self.resource.make([])

        self.assertEqual(["COMMIT", "COMMIT"], tracer.queries[-2:])
        store1 = zstorm.get("test")
        store2 = zstorm.get("test2")
        self.assertEqual([], list(store1.execute("SELECT foo FROM test")))
        self.assertEqual([], list(store2.execute("SELECT foo FROM test")))

    def test_make_zstorm_overwritten(self):
        """
        L{ZStormResourceManager.make} registers its own ZStorm again if a test
        has registered a new ZStorm utility overwriting the resource one.
        """
        zstorm = self.resource.make([])
        provideUtility(ZStorm())
        self.resource.make([])
        self.assertIs(zstorm, getUtility(IZStorm))

    def test_clean_flush(self):
        """
        L{ZStormResourceManager.clean} tries to flush the stores to make sure
        that they are all in a consistent state.
        """
        class Test(object):
            __storm_table__ = "test"
            foo = Unicode()
            bar = Int(primary=True)

            def __init__(self, foo, bar):
                self.foo = foo
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(u"data", 1))
        store.add(Test(u"data", 2))
        self.assertRaises(IntegrityError, self.resource.clean, zstorm)

    def test_clean_delete(self):
        """
        L{ZStormResourceManager.clean} cleans the database tables from the data
        created by the tests.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))

    def test_clean_with_force_delete(self):
        """
        If L{ZStormResourceManager.force_delete} is C{True}, L{Schema.delete}
        is always invoked upon test cleanup.
        """
        zstorm = self.resource.make([])
        zstorm.get("test")  # Force the creation of the store
        self.store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        self.store.commit()
        self.resource.force_delete = True
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))

    def test_wb_clean_clears_alive_cache_before_abort(self):
        """
        L{ZStormResourceManager.clean} clears the alive cache before
        aborting the transaction.
        """
        class Test(object):
            __storm_table__ = "test"
            bar = Int(primary=True)

            def __init__(self, bar):
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(1))
        store.add(Test(2))
        real_invalidate = store.invalidate

        def invalidate_proxy():
            self.assertEqual(0, len(store._alive.values()))
            real_invalidate()
        store.invalidate = invalidate_proxy

        self.resource.clean(zstorm)

    def test_schema_uri(self):
        """
        It's possible to specify an alternate URI for applying the schema
        and cleaning up tables after a test.
        """
        schema_uri = "sqlite:///%s" % self.makeFile()
        self.databases[0]["schema-uri"] = schema_uri
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        schema_store = Store(create_database(schema_uri))

        # The schema was applied using the alternate schema URI
        statement = "SELECT name FROM sqlite_master WHERE name='patch'"
        self.assertEqual([], list(store.execute(statement)))
        self.assertEqual([("patch",)], list(schema_store.execute(statement)))

        # The cleanup is performed with the alternate schema URI
        store.commit()
        schema_store.execute("INSERT INTO test (foo) VALUES ('data')")
        schema_store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(schema_store.execute("SELECT * FROM test")))

    def test_schema_uri_with_schema_stamp_dir(self):
        """
        If a schema stamp directory is set, and the stamp indicates there's no
        need to update the schema, the resource clean up code will still
        connect as schema user if it needs to run the schema delete statements
        because of a commit.
        """
        self.resource.schema_stamp_dir = self.makeFile()
        self.databases[0]["schema-uri"] = self.databases[0]["uri"]
        self.resource.make([])

        # Simulate a second test run that initializes the zstorm resource
        # from scratch, using the same schema stamp directory
        resource2 = ZStormResourceManager(self.databases)
        resource2.schema_stamp_dir = self.resource.schema_stamp_dir
        zstorm = resource2.make([])
        store = zstorm.get("test")
        store.execute("INSERT INTO test (foo) VALUES ('data')")
        store.commit()  # Committing will force a schema.delete() run
        resource2.clean(zstorm)
        self.assertEqual([], list(store.execute("SELECT * FROM test")))

    def test_no_schema(self):
        """
        A particular database may have no schema associated.
        """
        self.databases[0]["schema"] = None
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([],
                         list(store.execute("SELECT * FROM sqlite_master")))

    def test_no_schema_clean(self):
        """
        A particular database may have no schema associated. If it's committed
        during tests, it will just be skipped when cleaning up tables.
        """
        self.databases[0]["schema"] = None
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.commit()

        with CaptureTracer() as tracer:
            self.resource.clean(zstorm)

        self.assertEqual([], tracer.queries)

    def test_deprecated_database_format(self):
        """
        The old deprecated format of the 'database' constructor parameter is
        still supported.
        """
        import patch_package
        uri = "sqlite:///%s" % self.makeFile()
        schema = ZSchema([], [], [], patch_package)
        resource = ZStormResourceManager({"test": (uri, schema)})
        zstorm = resource.make([])
        store = zstorm.get("test")
        self.assertIsNot(None, store)

    def test_use_global_zstorm(self):
        """
        If the C{use_global_zstorm} attribute is C{True} then the global
        L{ZStorm} will be used.
        """
        self.resource.use_global_zstorm = True
        zstorm = self.resource.make([])
        self.assertIs(global_zstorm, zstorm)

    def test_provide_utility_before_patches(self):
        """
        The L{IZStorm} utility is provided before patches are applied, in order
        to let them get it if they need.
        """
        content = ("from zope.component import getUtility\n"
                   "from storm.zope.interfaces import IZStorm\n"
                   "def apply(store):\n"
                   "    getUtility(IZStorm)\n")
        self.makeFile(path=os.path.join(self.patch_dir, "patch_2.py"),
                      content=content)
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([(1,), (2,)],
                         sorted(store.execute("SELECT version FROM patch")))

    def test_create_schema_stamp_dir(self):
        """
        If a schema stamp directory is set, it's created automatically if it
        doesn't exist yet.
        """
        self.resource.schema_stamp_dir = self.makeFile()
        self.resource.make([])
        self.assertTrue(os.path.exists(self.resource.schema_stamp_dir))

    def test_use_schema_stamp(self):
        """
        If a schema stamp directory is set, then it's used to decide whether
        to upgrade the schema or not. In case the patch directory hasn't been
        changed since the last known upgrade, no schema upgrade is run.
        """
        self.resource.schema_stamp_dir = self.makeFile()

        self.resource.make([])

        # Simulate a second test run that initializes the zstorm resource
        # from scratch, using the same schema stamp directory
        resource2 = ZStormResourceManager(self.databases)
        resource2.schema_stamp_dir = self.resource.schema_stamp_dir

        with CaptureTracer() as tracer:
            resource2.make([])

        self.assertEqual([], tracer.queries)

    def test_use_schema_stamp_out_of_date(self):
        """
        If a schema stamp directory is set, then it's used to decide whether
        to upgrade the schema or not. In case the patch directory has changed
        a schema upgrade is run.
        """
        self.resource.schema_stamp_dir = self.makeFile()
        self.resource.make([])

        # Simulate a second test run that initializes the zstorm resource
        # from scratch, using the same schema stamp directory
        resource2 = ZStormResourceManager(self.databases)
        resource2.schema_stamp_dir = self.resource.schema_stamp_dir

        self.makeFile(path=os.path.join(self.patch_dir, "patch_2.py"),
                      content="def apply(store): pass")

        class FakeStat(object):
            st_mtime = os.stat(self.patch_dir).st_mtime + 1

        stat_mock = self.mocker.replace(os.stat)
        stat_mock(self.patch_dir)
        self.mocker.result(FakeStat())
        self.mocker.replay()

        resource2.make([])
        result = self.store.execute("SELECT version FROM patch")
        self.assertEqual([(1,), (2,)], sorted(result.get_all()))
Example #20
class TableReplacer:
    """
    This is the base class used by every Updater
    """

    def __init__(self, old_db_file, new_db_file, start_ver):

        from globaleaks.db.update_5_6 import User_version_5, Comment_version_5, Node_version_5
        from globaleaks.db.update_6_7 import Node_version_6, Context_version_6
        from globaleaks.db.update_7_8 import Node_version_7, Notification_version_7, Context_version_7, \
            Receiver_version_7, InternalFile_version_7
        from globaleaks.db.update_8_9 import Context_version_8, Receiver_version_8, Notification_version_8
        from globaleaks.db.update_9_10 import Node_version_9, ApplicationData_version_10, \
            Receiver_version_9, User_version_9
        from globaleaks.db.update_10_11 import InternalTip_version_10, InternalFile_version_10
        from globaleaks.db.update_11_12 import Node_version_11, ApplicationData_version_11, Context_version_11

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        self.table_history = {
            'Node' : [ Node_version_5, Node_version_6, Node_version_7, Node_version_9, None, Node_version_11, None, models.Node],
            'User' : [ User_version_5, User_version_9, None, None, None, models.User, None, None],
            'Context' : [ Context_version_6, None, Context_version_7, Context_version_8, Context_version_11, None, None, models.Context],
            'Receiver': [ Receiver_version_7, None, None, Receiver_version_8, Receiver_version_9, models.Receiver, None, None],
            'ReceiverFile' : [ models.ReceiverFile, None, None, None, None, None, None, None],
            'Notification': [ Notification_version_7, None, None, Notification_version_8, models.Notification, None, None, None],
            'Comment': [ Comment_version_5, models.Comment, None, None, None, None, None, None],
            'InternalTip' : [ InternalTip_version_10, None, None, None, None, None, models.InternalTip, None],
            'InternalFile' : [ InternalFile_version_7, None, None, InternalFile_version_10, None, None, models.InternalFile, None],
            'WhistleblowerTip' : [ models.WhistleblowerTip, None, None, None, None, None, None, None],
            'ReceiverTip' : [ models.ReceiverTip, None, None, None, None, None, None , None],
            'ReceiverInternalTip' : [ models.ReceiverInternalTip, None, None, None, None, None, None, None],
            'ReceiverContext' : [ models.ReceiverContext, None, None, None, None, None, None, None],
            'Message' : [ models.Message, None, None, None, None, None, None, None],
            'Stats' : [models.Stats, None, None, None, None, None, None, None],
            'ApplicationData' : [ApplicationData_version_10, None, None, None, None, None, None, models.ApplicationData],
        }

        for k, v in self.table_history.iteritems():
            # +1 because the count starts from 0,
            # -5 because releases 0,1,2,3,4 are not supported anymore
            assert len(v) == (DATABASE_VERSION + 1 - 5), \
                "I'm expecting a table with %d statuses (%s)" % (DATABASE_VERSION, k)

        print "%s Opening old DB: %s" % (self.debug_info, old_db_file)
        old_database = create_database("sqlite:%s" % self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database("sqlite:%s" % new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:

            print "%s Acquire SQL schema %s" % (self.debug_info, GLSetting.db_schema_file)

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                print "Unable to access %s" % GLSetting.db_schema_file
                raise Exception("Unable to access db schema file")

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f.readlines()).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query+';')
                    except OperationalError:
                        print "OperationalError in [%s]" % create_query

            self.store_new.commit()
            return
            # return here; the intermediate versions are migrated below:

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query+';')
            except OperationalError as excep:
                print "%s OperationalError in [%s]" % (self.debug_info, create_query)
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        pass

    def epilogue(self):
        pass

    def get_right_model(self, table_name, version):

        table_index = (version - 5)

        if not self.table_history.has_key(table_name):
            print "Not implemented usage of get_right_model %s (%s %d)" % (
                __file__, table_name, self.start_ver)
            raise NotImplementedError

        assert version <= DATABASE_VERSION, "wrong developer brainsync"

        if self.table_history[table_name][table_index]:
            # print "Immediate return %s = %s at version %d" % \
            #       ( table_name, self.table_history[table_name][table_index], version )
            return self.table_history[table_name][table_index]

        # else, it's None, and we have to take the previous valid version
        #
        # print "Requested version %d of %s need to be collected in the past" %\
        #       (version, table_name)

        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                # print ".. returning %s = %s" %\
                #       ( table_name, self.table_history[table_name][table_index] )
                return self.table_history[table_name][table_index]
            table_index -= 1

        # This should never happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name:
        @param version:
        @return:
            The right CREATE query for the requested model version
        """

        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):

        print "%s default %s migration assistant: #%d" % (
            self.debug_info, table_name,
            self.store_old.find(self.get_right_model(table_name, self.start_ver)).count())

        old_objects = self.store_old.find(self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # Storm internals simply reversed
            for k, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name) )

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        print "%s default %s migration assistant" % (self.debug_info, table_name)

        old_obj = self.store_old.find(self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # Storm internals simply reversed
        for k, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name) )

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        has been created between 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        has been created between 9 and 10!
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        has been created between 9 and 10!
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("ApplicationData")
Exemplo n.º 21
0
import datetime

from storm.locals import create_database, Store, Int, Unicode, Float, Date

from kiwi.ui.objectlist import Column
from kiwi.ui.search import SearchContainer, DateSearchFilter


class Sale(object):
    __storm_table__ = 'sale'
    id = Int(primary=True)
    description = Unicode()
    price = Float()
    date = Date()

database = create_database("sqlite:")
store = Store(database)

store.execute(
    "CREATE TABLE sale "
    "(id INTEGER PRIMARY KEY, description VARCHAR, price FLOAT, date DATE)"
)

today = datetime.date.today()

for description, price, date in [
    ('Cup of coffee', 2.04, today - datetime.timedelta(1)),
    ('Chocolate bar', 1.85, today - datetime.timedelta(40)),
    ('Candy',         0.99, today - datetime.timedelta(30)),
    ('Grape Juice',   3.38, today - datetime.timedelta(23)),
    ('Ice tea',       1.25, today - datetime.timedelta(10)),
    ('Cookies',       0.85, today - datetime.timedelta(5)),
    ('Noogies',       1.45, today - datetime.timedelta(2)),
    ('Chocolate bar', 1.85, today)]:

    s = Sale()
    s.description = unicode(description)
    s.price = price
    s.date = date
    store.add(s)

store.commit()
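
A short usage sketch (not part of the original fragment): once the rows above are committed, Storm can filter the sale table by date, which is what the kiwi DateSearchFilter drives from the UI.

# a minimal query, assuming the Sale model and store defined above
last_week = today - datetime.timedelta(7)
for sale in store.find(Sale, Sale.date >= last_week).order_by(Sale.date):
    print sale.description, sale.price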
Exemplo n.º 22
0
class StormManager(Singleton):
    log = logging.getLogger('{}.StormManager'.format(__name__))

    def __init__(self):
        pass

    @loggingInfo
    def init(self, *args):
        self.dbOK = False
        self.openDB()

    @loggingInfo
    def reset(self):
        self.closeDB()
        self.openDB()

    @loggingInfo
    def openDB(self):
        try:
            self._config = ConfigManager()
            self.db = self._config.config[self._config.database]["database"]
            create_db = False
            if self.db == self._config.Sqlite:
                folder = self._config.config[self._config.database]["folder"]
                loc = folder + '/icepapcms.db'
                print("Using Sqlite database at %s" % loc)
                create_db = not os.path.exists(loc)
                if create_db:
                    print("No database file found, creating it")
                    if not os.path.exists(folder):
                        os.mkdir(folder)
                self._database = create_database("%s:%s" % (self.db, loc))
            else:
                server = self._config.config[self._config.database]["server"]
                user = self._config.config[self._config.database]["user"]
                pwd = self._config.config[self._config.database]["password"]
                scheme = "{}://{}:{}@{}/icepapcms".format(
                    self.db, user, pwd, server)

                if self.db == 'mysql':
                    self._database = MySQL(scheme)
                else:
                    self._database = create_database(scheme)

            self._store = Store(self._database)
            if create_db:
                self.dbOK = self.createSqliteDB()
            else:
                self.dbOK = True
        except Exception as e:
            self.log.error("Unexpected error on openDB: %s", e)
            self.dbOK = False

    @loggingInfo
    def createSqliteDB(self):
        try:
            sql_file = resource_filename('icepapcms.db', 'creates_sqlite.sql')
            with open(sql_file) as f:
                sql_script = f.read()
            statements = re.compile(r";[ \t]*$", re.M)

            for statement in statements.split(sql_script):
                # strip "--" comments (to end of line or end of script)
                statement = re.sub(r"--.*(\n|\Z)", "", statement)
                if statement.strip():
                    create = statement + ";"
                    self._store.execute(create)
            self._store.commit()
            return True
        except Exception as e:
            self.log.error("Unexpected error on createSqliteDB: %s", e)
            return False

    @loggingInfo
    def closeDB(self):
        try:
            if self.dbOK:
                self._store.close()
            return True
        except Exception as e:
            self.log.error("Unexpected error on closeDB:", e)
            self.dbOK = False
            return False

    @loggingInfo
    def store(self, obj):
        self._store.add(obj)

    @loggingInfo
    def remove(self, obj):
        self._store.remove(obj)

    @loggingInfo
    def addIcepapSystem(self, icepap_system):
        try:
            self._store.add(icepap_system)
            self.commitTransaction()
            return True
        except Exception as e:
            self.log.error(
                "some exception trying to store the icepap system "
                "%s: %s", icepap_system, e)
            return False

    @loggingInfo
    def deleteLocation(self, location):
        if self.db == self._config.Sqlite:
            for system in location.systems:
                self.deleteIcepapSystem(system)
        self._store.remove(location)
        self.commitTransaction()

    @loggingInfo
    def deleteIcepapSystem(self, icepap_system):
        if self.db == self._config.Sqlite:
            for driver in icepap_system.drivers:
                self.deleteDriver(driver)
        self._store.remove(icepap_system)
        self.commitTransaction()

    @loggingInfo
    def deleteDriver(self, driver):

        for cfg in driver.historic_cfgs:
            for par in cfg.parameters:
                self._store.remove(par)
            self._store.remove(cfg)
        self._store.remove(driver)
        self.commitTransaction()

    @loggingInfo
    def getAllLocations(self):
        try:
            locations = self._store.find(Location)
            location_dict = {}
            for location in locations:
                location_dict[location.name] = location
            return location_dict
        except Exception as e:
            self.log.error("Unexpected error on getAllLocations: %s", e)
            return {}

    @loggingInfo
    def getLocation(self, name):
        return self._store.get(Location, name)

    @loggingInfo
    def getIcepapSystem(self, icepap_name):
        return self._store.get(IcepapSystem, icepap_name)

    @loggingInfo
    def existsDriver(self, mydriver, id):

        drivers = self._store.find(
            IcepapDriver, IcepapDriver.addr == IcepapDriverCfg.driver_addr,
            IcepapDriverCfg.id == CfgParameter.cfg_id,
            CfgParameter.name == str("ID"), CfgParameter.value == id)
        if drivers:
            for driver in drivers:
                if driver.addr != mydriver.addr:
                    return driver
            return None
        else:
            return None

    @loggingInfo
    def getLocationIcepapSystem(self, location):
        try:
            icepaps = self._store.find(IcepapSystem,
                                       IcepapSystem.location_name == location)
            icepaps.order_by(IcepapSystem.name)
            ipapdict = {}
            for ipap_sys in icepaps:
                ipapdict[ipap_sys.name] = ipap_sys
            return ipapdict
        except Exception as e:
            self.log.error(
                "Unexpected error on getLocationIcepapSystem: "
                "%s", e)
            return {}

    @loggingInfo
    def rollback(self):
        self._store.rollback()

    @loggingInfo
    def commitTransaction(self):
        try:
            self._store.commit()
            return True
        except Exception:
            return False
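
A standalone sketch of the createSqliteDB technique above: split a .sql script on statement-terminating semicolons, strip "--" comments, and execute each piece (the schema file name is hypothetical).

import re

from storm.locals import Store, create_database

def run_sql_script(store, path):
    with open(path) as f:
        script = f.read()
    # a ";" at end of line terminates a statement
    for statement in re.compile(r";[ \t]*$", re.M).split(script):
        statement = re.sub(r"--.*(\n|\Z)", "", statement)  # drop comments
        if statement.strip():
            store.execute(statement + ";")
    store.commit()

store = Store(create_database("sqlite:"))
run_sql_script(store, "creates_sqlite.sql")  # hypothetical file name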
Exemplo n.º 23
0
class TestChangeTracker(object):


    class A(object):
        __storm_table__ = 'testob'
        changehistory = ChangeHistory.configure("history")
        clt = ChangeTracker(changehistory)
        id = Int(primary=1)
        textval = Unicode(validator=clt)
        intval = Int(validator=clt)

    def setUp(self):
        database = create_database('sqlite:')
        self.store = Store(database)
        self.store.execute("""
            CREATE table history (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                ref_class VARCHAR(200),
                ref_pk VARCHAR(200),
                ref_attr VARCHAR(200),
                new_value VARCHAR(200),
                ctime DATETIME,
                cuser INT
            )
        """)
        self.store.execute("""
            CREATE TABLE testob (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                textval VARCHAR(200),
                intval INT,
                dateval DATETIME
            )""")

    def tearDown(self):
        self.store.rollback()

    def test_calls_next_validator(self):
        clt = ChangeTracker(ChangeHistory.configure("history"),
                            next_validator=lambda ob, attr, v: v * 2)

        class B(self.A):
            textval = Unicode(validator=clt)

        b = B()
        b.textval = u'bork'
        assert b.textval == u'borkbork'

    def test_adds_log_entries(self):

        class B(self.A):
            clt = ChangeTracker(ChangeHistory.configure("history"))
            textval = Unicode(validator=clt)

        b = self.store.add(B())
        b.textval = u'pointless'
        b.textval = u'aimless'
        changes = list(self.store.find(b.clt.change_cls))
        assert_equal(len(changes), 2)
        assert_equal(changes[0].new_value, 'pointless')
        assert_equal(changes[1].new_value, 'aimless')

    def test_value_type_preserved(self):
        a = self.store.add(self.A())
        a.textval = u'one'
        a.intval = 1
        changes = list(self.store.find(a.clt.change_cls))
        assert_equal(type(changes[0].new_value), unicode)
        assert_equal(type(changes[1].new_value), int)

    def test_ctime_set(self):
        start = datetime.now()
        a = self.store.add(self.A())
        a.textval = u'x'
        changes = list(self.store.find(a.clt.change_cls))
        assert_equal(type(changes[0].ctime), datetime)
        assert start < changes[0].ctime < datetime.now()

    def test_cuser_set(self):
        def getuser():
            return u'Fred'

        history = ChangeHistory.configure("history", getuser=getuser, usertype=Unicode)
        class B(self.A):
            textval = Unicode(validator=ChangeTracker(history))

        b = self.store.add(B())
        b.textval = u'foo'
        changes = self.store.find(history)
        assert_equal(changes[0].cuser, u'Fred')


    def test_changes_for_returns_change_history(self):
        a = self.store.add(self.A())
        b = self.store.add(self.A())
        a.id = 1
        a.textval = u'one'
        a.textval = u'two'
        b.id = 2
        b.textval = u'ein'
        b.textval = u'zwei'

        assert_equal([c.new_value for c in a.changehistory.changes_for(a)], [u'one', u'two'])
        assert_equal([c.new_value for c in a.changehistory.changes_for(b)], [u'ein', u'zwei'])
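
The ChangeTracker used above plugs into Storm's ordinary property-validator hook. A minimal sketch of that hook on its own, independent of the change-tracking library:

from storm.locals import Int, Store, Unicode, create_database

def uppercase(obj, attr, value):
    # Storm calls a validator as f(object, attr_name, value) and stores
    # whatever it returns
    return value.upper()

class Tag(object):
    __storm_table__ = 'tag'
    id = Int(primary=True)
    name = Unicode(validator=uppercase)

store = Store(create_database('sqlite:'))
store.execute("CREATE TABLE tag (id INTEGER PRIMARY KEY, name VARCHAR)")
tag = store.add(Tag())
tag.name = u'storm'
assert tag.name == u'STORM'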
Exemplo n.º 24
0
		return dict(
			x = self.x,
			y = self.y
		)

	def __repr__( self ):
		return '<Point2D x:%s, y:%s>' % (
			self.x,
			self.y
		)

database = create_database('sqlite:')  # 'sqlite:' is Storm's in-memory URI
store = Store(database)
store.execute('''CREATE TABLE point2d (
	id INTEGER PRIMARY KEY,
	x INTEGER,
	y INTEGER
)''')

p1 = Point2D(10,10)
p2 = Point2D(10,20)
p3 = Point2D(20,20)
p4 = Point2D(20,10)

store.add( p1 )
store.add( p2 )
store.add( p3 )
store.add( p4 )

points = store.find(Point2D, Point2D.x == 10)
print points[0] + points[1]
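
This example is truncated above its serialization helpers, so the columns, the constructor, and the __add__ used by the final print are missing. A plausible reconstruction of that head, offered only as an assumption:

from storm.locals import Int, Store, create_database

class Point2D(object):
	__storm_table__ = 'point2d'
	id = Int(primary=True)
	x = Int()
	y = Int()

	def __init__(self, x, y):
		self.x = x
		self.y = y

	def __add__(self, other):
		return Point2D(self.x + other.x, self.y + other.y)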
Exemplo n.º 25
0
class IssuesLog():
    def __init__(self):
        self._connect()
        # it is not incremental so we first drop the table
        self._drop_db()
        self._create_db()

    def _connect(self):
        opts = Config()

        self.database = create_database('mysql://' + opts.db_user_out + ':' +
                                        opts.db_password_out + '@' +
                                        opts.db_hostname_out + ':' +
                                        opts.db_port_out + '/' +
                                        opts.db_database_out)
        self.store = Store(self.database)

    def _create_db(self):
        self.store.execute(self._get_sql_create())

    def _drop_db(self):
        self.store.execute(self._get_sql_drop())

    def _get_people_id(self, email):
        """
        Gets the id of a user
        """
        try:
            p = self.store.find(DBPeople, DBPeople.email == email).one()
            return p.id
        except (AttributeError, NotOneError):
            p = self.store.find(DBPeople, DBPeople.user_id == email).one()
            try:
                return p.id
            except AttributeError:
                # no person was found in People with the email above, so
                # we include it
                printdbg("Person not found. Inserted with email %s " % (email))
                dp = DBPeople(email)
                self.store.add(dp)
                self.store.commit()
                return dp.id

    def _get_sql_drop(self):
        """
        Abstract method returning the SQL statement that drops the log table
        """
        raise NotImplementedError

    def _get_sql_create(self):
        """
        Abstract method returning the SQL statement that creates the log table
        """
        raise NotImplementedError

    def _get_tracker_id(self, issue_id):
        """
        Returns tracker id from issues
        """
        result = self.store.find(DBIssue.tracker_id,
                                 DBIssue.id == issue_id).one()
        return result

    def _copy_issue_ext(self, aux, db_ilog):
        """
        Abstract method that copies the backend-specific fields from db_ilog to aux
        """
        raise NotImplementedError

    # TODO: reuse _copy_standard_values
    def _copy_issue(self, db_ilog):
        """
        This method returns a copy of the DB*Log object
        """
        aux = self._get_dbissues_object(db_ilog.issue, db_ilog.tracker_id)
        aux.issue_id = db_ilog.issue_id
        aux.change_id = db_ilog.change_id
        aux.type = db_ilog.type
        aux.summary = db_ilog.summary
        aux.description = db_ilog.description
        aux.status = db_ilog.status
        aux.resolution = db_ilog.resolution
        aux.priority = db_ilog.priority
        aux.submitted_by = db_ilog.submitted_by
        aux.date = db_ilog.date
        aux.assigned_to = db_ilog.assigned_to
        aux = self._copy_issue_ext(aux, db_ilog)
        return aux

    def _assign_values(self, db_ilog, field, value):
        """
        Abstract method that assigns the value of a change to the matching field of db_ilog
        """
        raise NotImplementedError

    def _build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in
        order to get the initial state of the bug
        """
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes " +
                                    "WHERE issue_id=%s" % (db_ilog.issue_id))

        for f in fields:
            values = self.store.execute(
                "SELECT old_value FROM changes WHERE issue_id=%s AND \
                field=\"%s\" ORDER BY changed_on LIMIT 1" %
                (db_ilog.issue_id, f[0]))
            for v in values:
                db_ilog = self._assign_values(db_ilog, f[0], v[0])
            # the initial status does not correspond to a real change
            db_ilog.change_id = 0
        return db_ilog

    def _get_dbissues_object(self, issue_name, tracker_id):
        """
        Abstract method returning a new DB*IssuesLog object for the given issue and tracker
        """
        raise NotImplementedError

    def _copy_standard_values(self, issue, issue_log):
        """
        Copy the standard values from the issue object to the issue_log object
        """
        issue_log.issue_id = issue.id
        issue_log.type = issue.type
        issue_log.summary = issue.summary
        issue_log.description = issue.description
        issue_log.status = issue.status
        issue_log.resolution = issue.resolution
        issue_log.priority = issue.priority
        issue_log.submitted_by = issue.submitted_by
        issue_log.date = issue.submitted_on
        issue_log.assigned_to = issue.assigned_to
        return issue_log

    def _print_final_msg(self):
        """
        Abstract method that prints a final message once the log has been built
        """
        raise NotImplementedError

    def _get_changes(self, issue_id):
        aux = self.store.execute("SELECT id, field, new_value, changed_by, \
        changed_on FROM changes where issue_id=%s" % (issue_id))
        return aux

    def _post_history(self, issue_id):
        """
        Abstract method for inserting extra data using the full issue history
        """
        pass

    def run(self):
        ndone = 0
        issues = self.store.find(DBIssue)
        total = issues.count()
        print("[IssuesLog] Total issues to analyze: %d" % total)
        for i in issues:
            if (ndone % 1000 == 0):
                print("[IssuesLog] Analyzed %d/%d" % (ndone, total))
            db_ilog = self._get_dbissues_object(i.issue, i.tracker_id)
            db_ilog = self._copy_standard_values(i, db_ilog)
            final_status = db_ilog.status

            db_ilog = self._build_initial_state(db_ilog)

            self.store.add(db_ilog)
            self.store.flush()

            # the code below gets all the changes and insert a row per change
            changes = self._get_changes(db_ilog.issue_id)

            for ch in changes:
                change_id = ch[0]
                field = ch[1]
                new_value = ch[2]
                changed_by = ch[3]
                date = ch[4]
                # we need a new object to be inserted in the database
                db_ilog = self._copy_issue(db_ilog)
                db_ilog.date = date
                db_ilog.change_id = change_id
                db_ilog.submitted_by = changed_by
                db_ilog = self._assign_values(db_ilog, field, new_value)

                try:
                    self.store.add(db_ilog)
                    self.store.flush()
                except Exception:
                    # self.store.rollback() # is this useful in this context?
                    traceback.print_exc()
            ##self._post_history(db_ilog, final_status)
            self.store.commit()
            ndone += 1
        self._print_final_msg()
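
_build_initial_state above recovers the value each field had when the bug was opened by taking the oldest old_value recorded for that field. The same idea in isolation, over plain tuples (all names here are illustrative):

def initial_state(current, changes):
    # changes: (field, old_value, changed_on) tuples for a single issue
    state = dict(current)
    for field in set(change[0] for change in changes):
        # the oldest change of a field carries the value it started with
        oldest = min((c for c in changes if c[0] == field),
                     key=lambda c: c[2])
        state[field] = oldest[1]
    return state

state = initial_state(
    {'status': 'CLOSED', 'priority': 'P1'},
    [('status', 'NEW', 1), ('status', 'ASSIGNED', 2), ('priority', 'P3', 5)])
assert state == {'status': 'NEW', 'priority': 'P3'}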
Exemplo n.º 26
0
class TableReplacer(object):
    """
    This is the base class used by every Updater
    """

    def __init__(self, old_db_file, new_db_file, start_ver):
        from globaleaks.db.update_8_9 import Context_v_8, Receiver_v_8, Notification_v_8
        from globaleaks.db.update_9_10 import Node_v_9, Receiver_v_9, User_v_9
        from globaleaks.db.update_10_11 import InternalTip_v_10, InternalFile_v_10
        from globaleaks.db.update_11_12 import Node_v_11, Context_v_11
        from globaleaks.db.update_12_13 import Node_v_12, Context_v_12
        from globaleaks.db.update_13_14 import Node_v_13, Context_v_13
        from globaleaks.db.update_14_15 import Node_v_14, User_v_14, Context_v_14, Receiver_v_14, \
            InternalTip_v_14, Notification_v_14, Stats_v_14, Comment_v_14
        from globaleaks.db.update_15_16 import Receiver_v_15, Notification_v_15
        from globaleaks.db.update_16_17 import Node_v_16, Receiver_v_16, Notification_v_16, Stats_v_16
        from globaleaks.db.update_17_18 import Node_v_17
        from globaleaks.db.update_18_19 import Node_v_18
        from globaleaks.db.update_19_20 import Node_v_19, Notification_v_19, Comment_v_19, Message_v_19, \
            InternalTip_v_19, ReceiverTip_v_19, InternalFile_v_19, ReceiverFile_v_19, Receiver_v_19, \
            Context_v_19

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        self.table_history = {
            'Node': [Node_v_9, None, Node_v_11, None, Node_v_12, Node_v_13, Node_v_14, Node_v_16, None, Node_v_17,
                     Node_v_18, Node_v_19, models.Node],
            'User': [User_v_9, None, User_v_14, None, None, None, None, models.User, None, None, None, None, None],
            'Context': [Context_v_8, Context_v_11, None, None, Context_v_12, Context_v_13, Context_v_14, Context_v_19,
                        None, None, None, None, models.Context],
            'Receiver': [Receiver_v_8, Receiver_v_9, Receiver_v_14, None, None, None, None, Receiver_v_15,
                         Receiver_v_16, Receiver_v_19, None, None, models.Receiver],
            'ReceiverFile': [ReceiverFile_v_19, None, None, None, None, None, None, None, None, None, None, None,
                             models.ReceiverFile],
            'Notification': [Notification_v_8, Notification_v_14, None, None, None, None, None, Notification_v_15,
                             Notification_v_16, Notification_v_19, None, None, models.Notification],
            'Comment': [Comment_v_14, None, None, None, None, None, None, Comment_v_19, None, None, None, None,
                        models.Comment],
            'InternalTip': [InternalTip_v_10, None, None, InternalTip_v_14, None, None, None, InternalTip_v_19, None,
                            None, None, None, models.InternalTip],
            'InternalFile': [InternalFile_v_10, None, None, InternalFile_v_19, None, None, None, None, None, None, None,
                             None, models.InternalFile],
            'WhistleblowerTip': [models.WhistleblowerTip, None, None, None, None, None, None, None, None, None, None,
                                 None, None],
            'ReceiverTip': [ReceiverTip_v_19, None, None, None, None, None, None, None, None, None, None, None,
                            models.ReceiverTip],
            'ReceiverInternalTip': [models.ReceiverInternalTip, None, None, None, None, None, None, None, None, None,
                                    None, None, None],
            'ReceiverContext': [models.ReceiverContext, None, None, None, None, None, None, None, None, None, None,
                                None, None],
            'Message': [Message_v_19, None, None, None, None, None, None, None, None, None, None, None, models.Message],
            'Stats': [Stats_v_14, None, None, None, None, None, None, Stats_v_16, None, models.Stats, None, None, None],
            'ApplicationData': [models.ApplicationData, None, None, None, None, None, None, None, None, None, None,
                                None, None],
            'Field': [models.Field, None, None, None, None, None, None, None, None, None, None, None, None],
            'FieldOption': [models.FieldOption, None, None, None, None, None, None, None, None, None, None, None, None],
            'FieldField': [models.FieldField, None, None, None, None, None, None, None, None, None, None, None, None],
            'Step': [models.Step, None, None, None, None, None, None, None, None, None, None, None, None],
            'StepField': [models.StepField, None, None, None, None, None, None, None, None, None, None, None, None],
            'Anomalies': [models.Anomalies, None, None, None, None, None, None, None, None, None, None, None, None],
            'EventLogs': [models.EventLogs, None, None, None, None, None, None, None, None, None, None, None, None],
        }

        for k, v in self.table_history.iteritems():
            # +1 because the count starts from 0,
            # -8 because releases before the 8th are not supported anymore
            length = DATABASE_VERSION + 1 - 8
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            log.msg('{} Acquire SQL schema {}'.format(self.debug_info, GLSetting.db_schema_file))

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSetting.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        log.msg('OperationalError in "{}"'.format(create_query))
            self.store_new.commit()
            return
            # we return here; intermediate versions are handled by the loop below

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(self.debug_info, create_query))
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        pass

    def epilogue(self):
        pass

    def get_right_model(self, table_name, version):

        table_index = (version - 8)

        if table_name not in self.table_history:
            msg = 'Not implemented usage of get_right_model {} ({} {})'.format(
                __file__, table_name, self.start_ver)
            raise NotImplementedError(msg)

        if version > DATABASE_VERSION:
            raise ValueError('Version supplied must be less or equal to {}'.format(
                DATABASE_VERSION))

        if self.table_history[table_name][table_index]:
            return self.table_history[table_name][table_index]

        # else, it's None, and we have to take the previous valid version
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # This should never happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name: the name of the table to look up
        @param version: the requested database version
        @return:
            the CREATE TABLE query for the model at that version,
            or None if the table does not exist in it
        """

        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):
        models_count = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)
        ).count()
        log.msg('{} default {} migration assistant: #{}'.format(
            self.debug_info, table_name, models_count))

        old_objects = self.store_old.find(self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # copy every column value using Storm's internal column mapping
            for _, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        log.msg('{} default {} migration assistant'.format(self.debug_info, table_name))

        old_obj = self.store_old.find(self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # copy every column value using Storm's internal column mapping
        for _, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        has been created between 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        has been created between 14 and 15
        and has not been migrated since 17
        """
        if self.start_ver < 17:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        There is no need to migrate the application data.
        Default application data is loaded by the application
        and stored onto the db at each new start.
        """
        return

    def migrate_Field(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Field")

    def migrate_FieldOption(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldOption")

    def migrate_FieldField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldField")

    def migrate_Step(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Step")

    def migrate_StepField(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("StepField")

    def migrate_Anomalies(self):
        """
        has been created between 14 and 15!
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Anomalies")

    def migrate_EventLogs(self):
        """
        has been created between 15 and 16!
        should be dropped before 20
        """
        if self.start_ver < 20:
            return

        self._perform_copy_list("EventLogs")
Exemplo n.º 27
0
from storm.locals import create_database, Store, Int, Unicode, Reference

database = create_database("sqlite:")
store = Store(database)


class Person(object):
    __storm_table__ = 'person'
    id = Int(primary=True)
    name = Unicode()


class Address(object):
    __storm_table__ = 'address'
    id = Int(primary=True)
    address = Unicode()
    person_id = Int()
    person = Reference(person_id, Person.id)


store.execute("CREATE TABLE person (id INTEGER PRIMARY KEY, name VARCHAR)")
store.execute(
    "CREATE TABLE address (id INTEGER PRIMARY KEY, address VARCHAR, person_id INTEGER, "
    "FOREIGN KEY(person_id) REFERENCES person(id))")

person = Person()
person.name = u'person'
print(person)

print("%r, %r" % (person.id, person.name))
# None, u'person'
# Notice that person.id is None since the Person instance is not attached to a valid database store yet.
store.add(person)

print("%r, %r" % (person.id, person.name))
# None, u'person'
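
A short follow-up, using the store created above: Storm fills in the primary key as soon as the pending INSERT is flushed.

store.flush()
print("%r, %r" % (person.id, person.name))
# 1, u'person'
# flush() executed the INSERT, so SQLite assigned the id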
Exemplo n.º 28
0
class SchemaTest(MockerTestCase):
    def setUp(self):
        super(SchemaTest, self).setUp()
        self.database = create_database("sqlite:///%s" % self.makeFile())
        self.store = Store(self.database)

        self._package_dirs = set()
        self._package_names = set()
        self.package = self.create_package(self.makeDir(), "patch_package")
        import patch_package

        creates = ["CREATE TABLE person (id INTEGER, name TEXT)"]
        drops = ["DROP TABLE person"]
        deletes = ["DELETE FROM person"]

        self.schema = Schema(creates, drops, deletes, patch_package)

    def tearDown(self):
        for package_dir in self._package_dirs:
            sys.path.remove(package_dir)

        for name in list(sys.modules):
            if name in self._package_names:
                del sys.modules[name]
            elif any(name.startswith("%s." % x)
                     for x in self._package_names):
                del sys.modules[name]

        super(SchemaTest, self).tearDown()

    def create_package(self, base_dir, name, init_module=None):
        """Create a Python package.

        Packages created using this method will be removed from L{sys.path}
        and L{sys.modules} during L{tearDown}.

        @param base_dir: The directory in which to create the new package.
        @param name: The name of the package.
        @param init_module: Optionally, the text to include in the __init__.py
            file.
        @return: A L{Package} instance that can be used to create modules.
        """
        package_dir = os.path.join(base_dir, name)
        self._package_names.add(name)
        os.makedirs(package_dir)

        init_file = open(os.path.join(package_dir, "__init__.py"), "w")
        if init_module:
            init_file.write(init_module)
        init_file.close()
        sys.path.append(base_dir)
        self._package_dirs.add(base_dir)

        return Package(package_dir, name)

    def test_check_with_missing_schema(self):
        """
        L{Schema.check} raises an exception if the given store is completely
        pristine and no schema has been applied yet. The transaction doesn't
        get rolled back so it's still usable.
        """
        self.store.execute("CREATE TABLE foo (bar INT)")
        self.assertRaises(SchemaMissingError, self.schema.check, self.store)
        self.assertIsNone(self.store.execute("SELECT 1 FROM foo").get_one())

    def test_check_with_unapplied_patches(self):
        """
        L{Schema.check} raises an exception if the given store has unapplied
        schema patches.
        """
        self.schema.create(self.store)
        contents = """
def apply(store):
    pass
"""
        self.package.create_module("patch_1.py", contents)
        self.assertRaises(UnappliedPatchesError, self.schema.check, self.store)

    def test_create(self):
        """
        L{Schema.create} can be used to create the tables of a L{Store}.
        """
        self.assertRaises(StormError, self.store.execute,
                          "SELECT * FROM person")
        self.schema.create(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])
        # By default changes are committed
        store2 = Store(self.database)
        self.assertEquals(list(store2.execute("SELECT * FROM person")), [])

    def test_create_with_autocommit_off(self):
        """
        L{Schema.autocommit} can be used to turn automatic commits off.
        """
        self.schema.autocommit(False)
        self.schema.create(self.store)
        self.store.rollback()
        self.assertRaises(StormError, self.store.execute,
                          "SELECT * FROM patch")

    def test_drop(self):
        """
        L{Schema.drop} can be used to drop the tables of a L{Store}.
        """
        self.schema.create(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])
        self.schema.drop(self.store)
        self.assertRaises(StormError, self.store.execute,
                          "SELECT * FROM person")

    def test_drop_with_missing_patch_table(self):
        """
        L{Schema.drop} works fine even if the user's supplied statements end up
        dropping the patch table that we created.
        """
        import patch_package
        schema = Schema([], ["DROP TABLE patch"], [], patch_package)
        schema.create(self.store)
        schema.drop(self.store)
        self.assertRaises(StormError, self.store.execute,
                          "SELECT * FROM patch")

    def test_delete(self):
        """
        L{Schema.delete} can be used to clear the tables of a L{Store}.
        """
        self.schema.create(self.store)
        self.store.execute("INSERT INTO person (id, name) VALUES (1, 'Jane')")
        self.assertEquals(list(self.store.execute("SELECT * FROM person")),
                          [(1, u"Jane")])
        self.schema.delete(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])

    def test_upgrade_creates_schema(self):
        """
        L{Schema.upgrade} creates a schema from scratch if none exists, and is
        effectively equivalent to L{Schema.create} in that case.
        """
        self.assertRaises(StormError, self.store.execute,
                          "SELECT * FROM person")
        self.schema.upgrade(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])

    def test_upgrade_marks_patches_applied(self):
        """
        L{Schema.upgrade} updates the patch table after applying the needed
        patches.
        """
        contents = """
def apply(store):
    store.execute('ALTER TABLE person ADD COLUMN phone TEXT')
"""
        self.package.create_module("patch_1.py", contents)
        statement = "SELECT * FROM patch"
        self.assertRaises(StormError, self.store.execute, statement)
        self.schema.upgrade(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM patch")),
                          [(1, )])

    def test_upgrade_applies_patches(self):
        """
        L{Schema.upgrade} executes the needed patches, that typically modify
        the existing schema.
        """
        self.schema.create(self.store)
        contents = """
def apply(store):
    store.execute('ALTER TABLE person ADD COLUMN phone TEXT')
"""
        self.package.create_module("patch_1.py", contents)
        self.schema.upgrade(self.store)
        self.store.execute(
            "INSERT INTO person (id, name, phone) VALUES (1, 'Jane', '123')")
        self.assertEquals(list(self.store.execute("SELECT * FROM person")),
                          [(1, u"Jane", u"123")])

    def test_advance(self):
        """
        L{Schema.advance} executes the given patch version.
        """
        self.schema.create(self.store)
        contents1 = """
def apply(store):
    store.execute('ALTER TABLE person ADD COLUMN phone TEXT')
"""
        contents2 = """
def apply(store):
    store.execute('ALTER TABLE person ADD COLUMN address TEXT')
"""
        self.package.create_module("patch_1.py", contents1)
        self.package.create_module("patch_2.py", contents2)
        self.schema.advance(self.store, 1)
        self.store.execute(
            "INSERT INTO person (id, name, phone) VALUES (1, 'Jane', '123')")
        self.assertEquals(list(self.store.execute("SELECT * FROM person")),
                          [(1, u"Jane", u"123")])
Exemplo n.º 29
0
class ZStormResourceManagerTest(TestHelper):
    def is_supported(self):
        return has_transaction and has_zope_component and has_testresources

    def setUp(self):
        super(ZStormResourceManagerTest, self).setUp()
        package_dir = self.makeDir()
        sys.path.append(package_dir)
        self.patch_dir = os.path.join(package_dir, "patch_package")
        os.mkdir(self.patch_dir)
        self.makeFile(path=os.path.join(self.patch_dir, "__init__.py"),
                      content="")
        self.makeFile(path=os.path.join(self.patch_dir, "patch_1.py"),
                      content=PATCH)
        import patch_package
        create = ["CREATE TABLE test (foo TEXT UNIQUE, bar INT)"]
        drop = ["DROP TABLE test"]
        delete = ["DELETE FROM test"]
        uri = "sqlite:///%s" % self.makeFile()
        schema = ZSchema(create, drop, delete, PatchSet(patch_package))
        self.databases = [{"name": "test", "uri": uri, "schema": schema}]
        self.resource = ZStormResourceManager(self.databases)
        self.resource.vertical_patching = False
        self.store = Store(create_database(uri))

    def tearDown(self):
        global_zstorm._reset()
        del sys.modules["patch_package"]
        sys.modules.pop("patch_package.patch_1", None)
        super(ZStormResourceManagerTest, self).tearDown()

    def test_make(self):
        """
        L{ZStormResourceManager.make} returns a L{ZStorm} resource that can be
        used to get the registered L{Store}s.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo, bar FROM test")))

    def test_make_lazy(self):
        """
        L{ZStormResourceManager.make} does not create all stores upfront, but
        only when they're actually used, likewise L{ZStorm.get}.
        """
        zstorm = self.resource.make([])
        self.assertEqual([], list(zstorm.iterstores()))
        store = zstorm.get("test")
        self.assertEqual([("test", store)], list(zstorm.iterstores()))

    def test_make_upgrade(self):
        """
        L{ZStormResourceManager.make} upgrades the schema if needed.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT bar FROM test")))

    def test_make_upgrade_unknown_patch(self):
        """
        L{ZStormResourceManager.make} resets the schema if an unknown patch
        is found
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("INSERT INTO patch VALUES (2)")
        self.store.execute("CREATE TABLE test (foo TEXT, egg BOOL)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo, bar FROM test")))
        self.assertEqual([(1, )],
                         list(store.execute("SELECT version FROM patch")))

    def test_make_delete(self):
        """
        L{ZStormResourceManager.make} deletes the data from all tables to make
        sure that tests run against a clean database.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.execute("INSERT INTO test (foo) VALUES ('data')")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo FROM test")))

    def test_make_commits_transaction_once(self):
        """
        L{ZStormResourceManager.make} commits schema changes only once
        across all stores, after all patch and delete statements have
        been executed.
        """
        database2 = {
            "name": "test2",
            "uri": "sqlite:///%s" % self.makeFile(),
            "schema": self.databases[0]["schema"]
        }
        self.databases.append(database2)
        other_store = Store(create_database(database2["uri"]))
        for store in [self.store, other_store]:
            store.execute("CREATE TABLE patch "
                          "(version INTEGER NOT NULL PRIMARY KEY)")
            store.execute("CREATE TABLE test (foo TEXT)")
            store.execute("INSERT INTO test (foo) VALUES ('data')")
            store.commit()

        with CaptureTracer() as tracer:
            zstorm = self.resource.make([])

        self.assertEqual(["COMMIT", "COMMIT"], tracer.queries[-2:])
        store1 = zstorm.get("test")
        store2 = zstorm.get("test2")
        self.assertEqual([], list(store1.execute("SELECT foo FROM test")))
        self.assertEqual([], list(store2.execute("SELECT foo FROM test")))

    def test_make_zstorm_overwritten(self):
        """
        L{ZStormResourceManager.make} registers its own ZStorm again if a test
        has registered a new ZStorm utility overwriting the resource one.
        """
        zstorm = self.resource.make([])
        provideUtility(ZStorm())
        self.resource.make([])
        self.assertIs(zstorm, getUtility(IZStorm))

    def test_clean_flush(self):
        """
        L{ZStormResourceManager.clean} tries to flush the stores to make sure
        that they are all in a consistent state.
        """
        class Test(object):
            __storm_table__ = "test"
            foo = Unicode()
            bar = Int(primary=True)

            def __init__(self, foo, bar):
                self.foo = foo
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(u"data", 1))
        store.add(Test(u"data", 2))
        self.assertRaises(IntegrityError, self.resource.clean, zstorm)

    def test_clean_delete(self):
        """
        L{ZStormResourceManager.clean} cleans the database tables from the data
        created by the tests.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))

    def test_clean_with_force_delete(self):
        """
        If L{ZStormResourceManager.force_delete} is C{True}, L{Schema.delete}
        is always invoked upon test cleanup.
        """
        zstorm = self.resource.make([])
        zstorm.get("test")  # Force the creation of the store
        self.store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        self.store.commit()
        self.resource.force_delete = True
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))

    def test_wb_clean_clears_alive_cache_before_abort(self):
        """
        L{ZStormResourceManager.clean} clears the alive cache before
        aborting the transaction.
        """
        class Test(object):
            __storm_table__ = "test"
            bar = Int(primary=True)

            def __init__(self, bar):
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(1))
        store.add(Test(2))
        real_invalidate = store.invalidate

        def invalidate_proxy():
            self.assertEqual(0, len(list(store._alive.values())))
            real_invalidate()

        store.invalidate = invalidate_proxy

        self.resource.clean(zstorm)

    def test_schema_uri(self):
        """
        It's possible to specify an alternate URI for applying the schema
        and cleaning up tables after a test.
        """
        schema_uri = "sqlite:///%s" % self.makeFile()
        self.databases[0]["schema-uri"] = schema_uri
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        schema_store = Store(create_database(schema_uri))

        # The schema was applied using the alternate schema URI
        statement = "SELECT name FROM sqlite_master WHERE name='patch'"
        self.assertEqual([], list(store.execute(statement)))
        self.assertEqual([("patch", )], list(schema_store.execute(statement)))

        # The cleanup is performed with the alternate schema URI
        store.commit()
        schema_store.execute("INSERT INTO test (foo) VALUES ('data')")
        schema_store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(schema_store.execute("SELECT * FROM test")))

    def test_schema_uri_with_schema_stamp_dir(self):
        """
        If a schema stamp directory is set, and the stamp indicates there's no
        need to update the schema, the resource clean up code will still
        connect as schema user if it needs to run the schema delete statements
        because of a commit.
        """
        self.resource.schema_stamp_dir = self.makeFile()
        self.databases[0]["schema-uri"] = self.databases[0]["uri"]
        self.resource.make([])

        # Simulate a second test run that initializes the zstorm resource
        # from scratch, using the same schema stamp directory
        resource2 = ZStormResourceManager(self.databases)
        resource2.schema_stamp_dir = self.resource.schema_stamp_dir
        zstorm = resource2.make([])
        store = zstorm.get("test")
        store.execute("INSERT INTO test (foo) VALUES ('data')")
        store.commit()  # Committing will force a schema.delete() run
        resource2.clean(zstorm)
        self.assertEqual([], list(store.execute("SELECT * FROM test")))

    def test_no_schema(self):
        """
        A particular database may have no schema associated.
        """
        self.databases[0]["schema"] = None
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([],
                         list(store.execute("SELECT * FROM sqlite_master")))

    def test_no_schema_clean(self):
        """
        A particular database may have no schema associated. If it's committed
        during tests, it will just be skipped when cleaning up tables.
        """
        self.databases[0]["schema"] = None
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.commit()

        with CaptureTracer() as tracer:
            self.resource.clean(zstorm)

        self.assertEqual([], tracer.queries)

    def test_deprecated_database_format(self):
        """
        The old deprecated format of the 'database' constructor parameter is
        still supported.
        """
        import patch_package
        uri = "sqlite:///%s" % self.makeFile()
        schema = ZSchema([], [], [], patch_package)
        resource = ZStormResourceManager({"test": (uri, schema)})
        zstorm = resource.make([])
        store = zstorm.get("test")
        self.assertIsNot(None, store)

    def test_use_global_zstorm(self):
        """
        If the C{use_global_zstorm} attribute is C{True} then the global
        L{ZStorm} will be used.
        """
        self.resource.use_global_zstorm = True
        zstorm = self.resource.make([])
        self.assertIs(global_zstorm, zstorm)

    def test_provide_utility_before_patches(self):
        """
        The L{IZStorm} utility is provided before patches are applied, in order
        to let them get it if they need.
        """
        content = ("from zope.component import getUtility\n"
                   "from storm.zope.interfaces import IZStorm\n"
                   "def apply(store):\n"
                   "    getUtility(IZStorm)\n")
        self.makeFile(path=os.path.join(self.patch_dir, "patch_2.py"),
                      content=content)
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([(1, ), (2, )],
                         sorted(store.execute("SELECT version FROM patch")))

    def test_create_schema_stamp_dir(self):
        """
        If a schema stamp directory is set, it's created automatically if it
        doesn't exist yet.
        """
        self.resource.schema_stamp_dir = self.makeFile()
        self.resource.make([])
        self.assertTrue(os.path.exists(self.resource.schema_stamp_dir))

    def test_use_schema_stamp(self):
        """
        If a schema stamp directory is set, then it's used to decide whether
        to upgrade the schema or not. In case the patch directory hasn't been
        changed since the last known upgrade, no schema upgrade is run.
        """
        self.resource.schema_stamp_dir = self.makeFile()

        self.resource.make([])

        # Simulate a second test run that initializes the zstorm resource
        # from scratch, using the same schema stamp directory
        resource2 = ZStormResourceManager(self.databases)
        resource2.schema_stamp_dir = self.resource.schema_stamp_dir

        with CaptureTracer() as tracer:
            resource2.make([])

        self.assertEqual([], tracer.queries)

    def test_use_schema_stamp_out_of_date(self):
        """
        If a schema stamp directory is set, then it's used to decide whether
        to upgrade the schema or not. In case the patch directory has changed
        a schema upgrade is run.
        """
        self.resource.schema_stamp_dir = self.makeFile()
        self.resource.make([])

        # Simulate a second test run that initializes the zstorm resource
        # from scratch, using the same schema stamp directory
        resource2 = ZStormResourceManager(self.databases)
        resource2.schema_stamp_dir = self.resource.schema_stamp_dir

        self.makeFile(path=os.path.join(self.patch_dir, "patch_2.py"),
                      content="def apply(store): pass")

        class FakeStat(object):
            st_mtime = os.stat(self.patch_dir).st_mtime + 1

        stat_mock = self.mocker.replace(os.stat)
        stat_mock(self.patch_dir)
        self.mocker.result(FakeStat())
        self.mocker.replay()

        resource2.make([])
        result = self.store.execute("SELECT version FROM patch")
        self.assertEqual([(1, ), (2, )], sorted(result.get_all()))
Exemplo n.º 30
0
class IssuesLog():

    def __init__(self, backend_name):
        self.backend_name = backend_name
        self.connect()
        self.create_db()

    def connect(self):
        opts = Config()

        self.database = create_database('mysql://' + opts.db_user_out + ':'
                                        + opts.db_password_out + '@'
                                        + opts.db_hostname_out + ':'
                                        + opts.db_port_out + '/'
                                        + opts.db_database_out)
        self.store = Store(self.database)

    def create_db(self):
        print("self.backend_name = %s" % (self.backend_name))
        if self.backend_is_bugzilla():
            self.store.execute(__sql_table_bugzilla__)
        elif self.backend_is_jira():
            self.store.execute(__sql_table_jira__)

    def copy_issue(self, db_ilog):
        """
        This method creates a copy of DBBugzilla/JiraIssuesLog object
        """

        if self.backend_is_bugzilla():
            aux = DBBugzillaIssuesLog(db_ilog.issue, db_ilog.tracker_id)
            aux.issue_id = db_ilog.issue_id
            aux.type = db_ilog.type
            aux.summary = db_ilog.summary
            aux.description = db_ilog.description
            aux.status = db_ilog.status
            aux.resolution = db_ilog.resolution
            aux.priority = db_ilog.priority
            aux.submitted_by = db_ilog.submitted_by
            aux.date = db_ilog.date
            aux.assigned_to = db_ilog.assigned_to

            #aux = DBBugzillaIssuesLog (db_ilog.issue_id)
            aux.alias = db_ilog.alias
            aux.delta_ts = db_ilog.delta_ts
            aux.reporter_accessible = db_ilog.reporter_accessible
            aux.cclist_accessible = db_ilog.cclist_accessible
            aux.classification_id = db_ilog.classification_id
            aux.classification = db_ilog.classification
            aux.product = db_ilog.product
            aux.component = db_ilog.component
            aux.version = db_ilog.version
            aux.rep_platform = db_ilog.rep_platform
            aux.op_sys = db_ilog.op_sys
            aux.dup_id = db_ilog.dup_id
            aux.bug_file_loc = db_ilog.bug_file_loc
            aux.status_whiteboard = db_ilog.status_whiteboard
            aux.target_milestone = db_ilog.target_milestone
            aux.votes = db_ilog.votes
            aux.everconfirmed = db_ilog.everconfirmed
            aux.qa_contact = db_ilog.qa_contact
            aux.estimated_time = db_ilog.estimated_time
            aux.remaining_time = db_ilog.remaining_time
            aux.actual_time = db_ilog.actual_time
            aux.deadline = db_ilog.deadline
            aux.keywords = db_ilog.keywords
            aux.cc = db_ilog.cc
            aux.group_bugzilla = db_ilog.group_bugzilla
            aux.flag = db_ilog.flag
            return aux

        elif self.backend_is_jira():
            aux = DBJiraIssuesLog(db_ilog.issue, db_ilog.tracker_id)
            aux.issue_id = db_ilog.issue_id
            aux.type = db_ilog.type
            aux.summary = db_ilog.summary
            aux.description = db_ilog.description
            aux.status = db_ilog.status
            aux.resolution = db_ilog.resolution
            aux.priority = db_ilog.priority
            aux.submitted_by = db_ilog.submitted_by
            aux.date = db_ilog.date
            aux.assigned_to = db_ilog.assigned_to

            aux.link = db_ilog.link
            aux.component = db_ilog.component
            aux.version = db_ilog.version
            aux.issue_key = db_ilog.issue_key
            aux.environment = db_ilog.environment
            aux.project = db_ilog.project
            aux.project_key = db_ilog.project_key
            aux.security = db_ilog.security

            return aux

    def get_people_id(self, email, tracker_id):
        """
        Gets the id of a user
        """
        p = self.store.find(DBPeople, DBPeople.email == email).one()
        ##
        ## the code below was created ad-hoc for KDE solid
        ##
        try:
            return p.id
        except AttributeError:
            p = self.store.find(DBPeople, DBPeople.user_id == email).one()
            try:
                return p.id
            except AttributeError:
                # no person was found in People with the email above, so
                # we include it
                printdbg("Person not found. Inserted with email %s " % (email))
                dp = DBPeople(email, tracker_id)
                self.store.add(dp)
                self.store.commit()
                return dp.id

    def get_last_change_date(self):
        """
        This method gets the date of the last change included in the log table
        """
        if self.backend_is_bugzilla():
            result = self.store.find(DBBugzillaIssuesLog)
            aux = result.order_by(Desc(DBBugzillaIssuesLog.date))[:1]
            for entry in aux:
                return entry.date
        elif self.backend_is_jira():
            result = self.store.find(DBJiraIssuesLog)
            aux = result.order_by(Desc(DBJiraIssuesLog.date))[:1]
            for entry in aux:
                return entry.date
        return None

    def get_issues_changed_since(self, date):
        """
        This method fetches the issues changed since the given date
        """

        #SELECT DISTINCT(issues.id) FROM issues, changes
        #WHERE issues.id = changes.issue_id
        #AND (issues.submitted_on >= '2012-02-28 12:34:44'
        #    OR changes.changed_on >= '2012-02-28 12:34:44');

        result = self.store.find(DBIssue,
                                 DBChange.issue_id == DBIssue.id,
                                 Or(DBIssue.submitted_on > date,
                                    DBChange.changed_on > date)).group_by(DBIssue.id)

        return result

    def get_previous_state(self, issue_id):
        """
        This method returns a db_ilog object with the last row found in
        the log table
        """
        db_ilog = None
        if self.backend_is_jira():
            rows = self.store.find(DBJiraIssuesLog,
                                   DBJiraIssuesLog.issue_id == issue_id)
            lrow = rows.order_by(Desc(DBJiraIssuesLog.id))[:1]
            for aux in lrow:  # FIXME it only contains an element!
                db_ilog = DBJiraIssuesLog(aux.issue, aux.tracker_id)
                db_ilog.issue_id = aux.issue_id
                db_ilog.type = aux.type
                db_ilog.summary = aux.summary
                db_ilog.description = aux.description
                db_ilog.status = aux.status
                db_ilog.resolution = aux.resolution
                db_ilog.priority = aux.priority
                db_ilog.submitted_by = aux.submitted_by
                db_ilog.date = aux.date
                db_ilog.assigned_to = aux.assigned_to
                db_ilog.issue_key = aux.issue_key
                db_ilog.link = aux.link
                db_ilog.environment = aux.environment
                db_ilog.security = aux.security
                db_ilog.updated = aux.updated
                db_ilog.version = aux.version
                db_ilog.component = aux.component
                db_ilog.votes = aux.votes
                db_ilog.project = aux.project
                db_ilog.project_id = aux.project_id
                db_ilog.project_key = aux.project_key
        else:  # elif self.backend_is_bugzilla():
            rows = self.store.find(DBBugzillaIssuesLog,
                                   DBBugzillaIssuesLog.issue_id == issue_id)
            lrow = rows.order_by(Desc(DBBugzillaIssuesLog.id))[:1]
            for aux in lrow:  # FIXME it only contains an element!
                db_ilog = DBBugzillaIssuesLog(aux.issue, aux.tracker_id)
                db_ilog.issue_id = aux.issue_id
                db_ilog.type = aux.type
                db_ilog.summary = aux.summary
                db_ilog.description = aux.description
                db_ilog.status = aux.status
                db_ilog.resolution = aux.resolution
                db_ilog.priority = aux.priority
                db_ilog.submitted_by = aux.submitted_by
                db_ilog.date = aux.date
                db_ilog.assigned_to = aux.assigned_to
                db_ilog.alias = aux.alias
                db_ilog.delta_ts = aux.delta_ts
                db_ilog.reporter_accessible = aux.reporter_accessible
                db_ilog.cclist_accessible = aux.cclist_accessible
                db_ilog.classification_id = aux.classification_id
                db_ilog.classification = aux.classification
                db_ilog.product = aux.product
                db_ilog.component = aux.component
                db_ilog.version = aux.version
                db_ilog.rep_platform = aux.rep_platform
                db_ilog.op_sys = aux.op_sys
                db_ilog.dup_id = aux.dup_id
                db_ilog.bug_file_loc = aux.bug_file_loc
                db_ilog.status_whiteboard = aux.status_whiteboard
                db_ilog.target_milestone = aux.target_milestone
                db_ilog.votes = aux.votes
                db_ilog.everconfirmed = aux.everconfirmed
                db_ilog.qa_contact = aux.qa_contact
                db_ilog.estimated_time = aux.estimated_time
                db_ilog.remaining_time = aux.remaining_time
                db_ilog.actual_time = aux.actual_time
                db_ilog.deadline = aux.deadline
                db_ilog.keywords = aux.keywords
                db_ilog.cc = aux.cc
                db_ilog.group_bugzilla = aux.group_bugzilla
                db_ilog.flag = aux.flag

        return db_ilog

    def issue_is_new(self, issue_id):
        """
        This method returns True if the issue is not logged in the log table
        """
        if self.backend_is_jira():
            result = self.store.find(DBJiraIssuesLog,
                                     DBJiraIssuesLog.issue_id == issue_id)
        elif self.backend_is_bugzilla():
            result = self.store.find(DBBugzillaIssuesLog,
                                     DBBugzillaIssuesLog.issue_id == issue_id)
        return (result.count() == 0)

    def build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in
        order to get the initial state of the bug
        """
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes\
        where issue_id=%s" % (db_ilog.issue_id))

        for f in fields:
            value = self.store.execute("SELECT old_value FROM changes \
            WHERE issue_id=%s AND field=\"%s\" ORDER BY changed_on LIMIT 1"
                                  % (db_ilog.issue_id, f[0]))
            for v in value:
                if self.backend_is_bugzilla():
                    # Bugzilla section
                    #
                    if f[0] in bg_issues_links:
                        table_field = bg_issues_links[f[0]]
                        if table_field == 'summary':
                            db_ilog.summary = v[0]
                        elif table_field == 'priority':
                            db_ilog.priority = v[0]
                        elif table_field == 'type':
                            db_ilog.type = v[0]
                        elif table_field == 'assigned_to':
                            db_ilog.assigned_to = self.get_people_id(
                                v[0], self.get_tracker_id(db_ilog.issue_id))
                        elif table_field == 'status':
                            db_ilog.status = v[0]
                        elif table_field == 'resolution':
                            db_ilog.resolution = v[0]
                        elif table_field == 'alias':
                            db_ilog.alias = v[0]
                        elif table_field == 'reporter_accessible':
                            db_ilog.reporter_accessible = v[0]
                        elif table_field == 'cclist_accessible':
                            db_ilog.cclist_accessible = v[0]
                        elif table_field == 'product':
                            db_ilog.product = v[0]
                        elif table_field == 'component':
                            db_ilog.component = v[0]
                        elif table_field == 'version':
                            db_ilog.version = v[0]
                        elif table_field == 'rep_platform':
                            db_ilog.rep_platform = v[0]
                        elif table_field == 'op_sys':
                            db_ilog.op_sys = v[0]
                        elif table_field == 'bug_file_loc':
                            db_ilog.bug_file_loc = v[0]
                        elif table_field == 'status_whiteboard':
                            db_ilog.status_whiteboard = v[0]
                        elif table_field == 'target_milestone':
                            db_ilog.target_milestone = v[0]
                        elif table_field == 'votes':
                            db_ilog.votes = v[0]
                        elif table_field == 'everconfirmed':
                            db_ilog.everconfirmed = v[0]
                        elif table_field == 'qa_contact':
                            db_ilog.qa_contact = v[0]
                        elif table_field == 'keywords':
                            db_ilog.keywords = v[0]
                        elif table_field == 'cc':
                            db_ilog.cc = v[0]
                if self.backend_is_jira():
                    # Jira section
                    #
                    if f[0] in jira_issues_links:
                        table_field = jira_issues_links[f[0]]
                        if table_field == 'summary':
                            db_ilog.summary = v[0]
                        elif table_field == 'priority':
                            db_ilog.priority = v[0]
                        elif table_field == 'type':
                            db_ilog.type = v[0]
                        elif table_field == 'assigned_to':
                            db_ilog.assigned_to = self.get_people_id(
                                v[0], self.get_tracker_id(db_ilog.issue_id))
                        elif table_field == 'status':
                            db_ilog.status = v[0]
                        elif table_field == 'resolution':
                            db_ilog.resolution = v[0]
                        elif table_field == 'link':
                            db_ilog.link = v[0]
                        elif table_field == 'environment':
                            db_ilog.environment = v[0]
                        elif table_field == 'component':
                            db_ilog.component = v[0]
                        elif table_field == 'description':
                            db_ilog.description = v[0]
                        elif table_field == 'security':
                            db_ilog.security = v[0]

        return db_ilog

    def backend_is_bugzilla(self):
        return self.backend_name == 'bg'

    def backend_is_jira(self):
        return self.backend_name == 'jira'

    def get_last_values(self, issue_row):
        i = issue_row
        db_ilog = None
        if self.backend_is_bugzilla():
            db_ilog_bugzilla = DBBugzillaIssuesLog(i.issue, i.tracker_id)
            db_ilog_bugzilla.issue_id = i.id
            db_ilog_bugzilla.type = i.type
            db_ilog_bugzilla.summary = i.summary
            db_ilog_bugzilla.description = i.description
            db_ilog_bugzilla.status = i.status
            db_ilog_bugzilla.resolution = i.resolution
            db_ilog_bugzilla.priority = i.priority
            db_ilog_bugzilla.submitted_by = i.submitted_by
            db_ilog_bugzilla.date = i.submitted_on
            db_ilog_bugzilla.assigned_to = i.assigned_to

            ib = self.store.find(DBBugzillaIssueExt, \
                                 DBBugzillaIssueExt.issue_id == db_ilog_bugzilla.issue_id).one()

            ####
            db_ilog_bugzilla.alias = ib.alias
            db_ilog_bugzilla.delta_ts = ib.delta_ts
            db_ilog_bugzilla.reporter_accessible = ib.reporter_accessible
            db_ilog_bugzilla.cclist_accessible = ib.cclist_accessible
            db_ilog_bugzilla.classification_id = ib.classification_id
            db_ilog_bugzilla.classification = ib.classification
            db_ilog_bugzilla.product = ib.product
            db_ilog_bugzilla.component = ib.component
            db_ilog_bugzilla.version = ib.version
            db_ilog_bugzilla.rep_platform = ib.rep_platform
            db_ilog_bugzilla.op_sys = ib.op_sys
            db_ilog_bugzilla.dup_id = ib.dup_id
            db_ilog_bugzilla.bug_file_loc = ib.bug_file_loc
            db_ilog_bugzilla.status_whiteboard = ib.status_whiteboard
            db_ilog_bugzilla.target_milestone = ib.target_milestone
            db_ilog_bugzilla.votes = ib.votes
            db_ilog_bugzilla.everconfirmed = ib.everconfirmed
            db_ilog_bugzilla.qa_contact = ib.qa_contact
            db_ilog_bugzilla.estimated_time = ib.estimated_time
            db_ilog_bugzilla.remaining_time = ib.remaining_time
            db_ilog_bugzilla.actual_time = ib.actual_time
            db_ilog_bugzilla.deadline = ib.deadline
            db_ilog_bugzilla.keywords = ib.keywords
            db_ilog_bugzilla.cc = ib.cc
            db_ilog_bugzilla.group_bugzilla = ib.group_bugzilla
            db_ilog_bugzilla.flag = ib.flag
            db_ilog = db_ilog_bugzilla

        elif self.backend_is_jira():
            db_ilog = DBJiraIssuesLog(i.issue, i.tracker_id)
            db_ilog.issue_id = i.id
            db_ilog.type = i.type
            db_ilog.summary = i.summary
            db_ilog.description = i.description
            db_ilog.status = i.status
            db_ilog.resolution = i.resolution
            db_ilog.priority = i.priority
            db_ilog.submitted_by = i.submitted_by
            db_ilog.date = i.submitted_on
            db_ilog.assigned_to = i.assigned_to

            ib = self.store.find(DBJiraIssueExt, \
                                 DBJiraIssueExt.issue_id == db_ilog.issue_id).one()

            db_ilog.issue_key = ib.issue_key
            db_ilog.link = ib.link
            db_ilog.environment = ib.environment
            db_ilog.security = ib.security
            db_ilog.updated = ib.updated
            db_ilog.version = ib.version
            db_ilog.component = ib.component
            db_ilog.votes = ib.votes
            db_ilog.project = ib.project
            db_ilog.project_id = ib.project_id
            db_ilog.project_key = ib.project_key

        return db_ilog

    def insert_new_bugs_created(self, date_from, date_to):
        """
        This method inserts an entry with the issue's data at creation time
        """
        if (not date_from) and (not date_to):
            issues = self.store.find(DBIssue)
        elif not date_from:
            issues = self.store.find(DBIssue, DBIssue.submitted_on < date_to)
        elif not date_to:
            issues = self.store.find(DBIssue, DBIssue.submitted_on > date_from)
        else:
            issues = self.store.find(DBIssue,
                                     And(DBIssue.submitted_on <= date_to,
                                         DBIssue.submitted_on > date_from))

        issues = issues.order_by(Asc(DBIssue.submitted_on))
        ## we store the initial data for each bug found
        for i in issues:
            db_ilog = self.get_last_values(i)  # from issues and change tables
            db_ilog = self.build_initial_state(db_ilog)
            self.store.add(db_ilog)
            printdbg("Issue #%s created at %s - date_from = %s - date_to = %s"
                     % (db_ilog.issue, db_ilog.date, date_from, date_to))

    def get_tracker_id(self, issue_id):
        """
        Returns tracker id from issues
        """
        result = self.store.find(DBIssue.tracker_id,
                                 DBIssue.id == issue_id).one()
        return result

    def run(self):

        last_change_date = self.get_last_change_date()
        printdbg("Last change logged at %s" % (last_change_date))

        date_from = None
        date_to = None

        if last_change_date:
            changes = self.store.find(DBChange,
                                      DBChange.changed_on > last_change_date)
            date_from = last_change_date
        else:
            changes = self.store.find(DBChange)

        changes = changes.order_by(Asc(DBChange.changed_on))

        for ch in changes:
            # insert creation if needed
            date_to = ch.changed_on
            self.insert_new_bugs_created(date_from, date_to)
            date_from = date_to

            field = ch.field
            new_value = ch.new_value
            changed_by = ch.changed_by
            date = ch.changed_on
            issue_id = ch.issue_id

            #print("field = %s, new_value = %s, changed_by = %s, date = %s"
            #      % (field, new_value, str(changed_by), str(date)))

            db_ilog = self.get_previous_state(issue_id)

            printdbg("Issue #%s modified at %s" %
                     (db_ilog.issue, date))

            if self.backend_is_bugzilla():
                # Bugzilla section
                #
                #
                if (field in bg_issues_links):
                    table_field = bg_issues_links[field]
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    if table_field == 'summary':
                        db_ilog.summary = new_value
                    elif table_field == 'priority':
                        db_ilog.priority = new_value
                    elif table_field == 'type':
                        db_ilog.type = new_value
                    elif table_field == 'assigned_to':
                        db_ilog.assigned_to = self.get_people_id(
                            new_value, self.get_tracker_id(db_ilog.issue_id))
                    elif table_field == 'status':
                        db_ilog.status = new_value
                    elif table_field == 'resolution':
                        db_ilog.resolution = new_value
                    elif table_field == 'alias':
                        db_ilog.alias = new_value
                    elif table_field == 'reporter_accessible':
                        db_ilog.reporter_accessible = new_value
                    elif table_field == 'cclist_accessible':
                        db_ilog.cclist_accessible = new_value
                    elif table_field == 'product':
                        db_ilog.product = new_value
                    elif table_field == 'component':
                        db_ilog.component = new_value
                    elif table_field == 'version':
                        db_ilog.version = new_value
                    elif table_field == 'rep_platform':
                        db_ilog.rep_platform = new_value
                    elif table_field == 'op_sys':
                        db_ilog.op_sys = new_value
                    elif table_field == 'bug_file_loc':
                        db_ilog.bug_file_loc = new_value
                    elif table_field == 'status_whiteboard':
                        db_ilog.status_whiteboard = new_value
                    elif table_field == 'target_milestone':
                        db_ilog.target_milestone = new_value
                    elif table_field == 'votes':
                        db_ilog.votes = new_value
                    elif table_field == 'everconfirmed':
                        db_ilog.everconfirmed = new_value
                    elif table_field == 'qa_contact':
                        db_ilog.qa_contact = new_value
                    elif table_field == 'keywords':
                        db_ilog.keywords = new_value
                    elif table_field == 'cc':
                        db_ilog.cc = new_value

                    try:
                        self.store.add(db_ilog)
                    except Exception:
                        traceback.print_exc()

            elif self.backend_is_jira():
                # Jira section
                #
                #

                if (field in jira_issues_links):
                    table_field = jira_issues_links[field]
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    if table_field == 'summary':
                        db_ilog.summary = new_value
                    elif table_field == 'priority':
                        db_ilog.priority = new_value
                    elif table_field == 'type':
                        db_ilog.type = new_value
                    elif table_field == 'assigned_to':
                        db_ilog.assigned_to = self.get_people_id(
                            new_value, self.get_tracker_id(db_ilog.issue_id))
                    elif table_field == 'status':
                        db_ilog.status = new_value
                    elif table_field == 'resolution':
                        db_ilog.resolution = new_value
                    elif table_field == 'description':
                        db_ilog.description = new_value
                    elif table_field == 'link':
                        db_ilog.link = new_value
                    elif table_field == 'component':
                        db_ilog.component = new_value
                    elif table_field == 'version':
                        db_ilog.version = new_value
                    elif table_field == 'security':
                        db_ilog.security = new_value
                    try:
                        self.store.add(db_ilog)
                    except Exception:
                        traceback.print_exc()

        # After processing all the changes, log the bugs created after the
        # last change; if there were no changes at all, log every created bug.
        self.insert_new_bugs_created(date_from, None)
        self.store.commit()
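
Driving the class end to end is just construction plus run(); a minimal usage sketch (assuming Config is already loaded with the db_*_out connection settings read in connect() above):

ilog = IssuesLog('bg')   # or 'jira'
ilog.run()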
Example No. 31
class NCBITaxnomyInserter(object):
    
    def __init__(self, divisions_file_path, taxonomy_divisions_file_path):       
        
        self.included_divisions= {0:"Bacteria",3:"Phages",9:"Viruses", 11:"Environmental samples", 
                1:"Invertebrates", 4:"Plants and Fungi"}

        self.divisions_file_path= divisions_file_path
        self.taxonomy_divisions_file_path= taxonomy_divisions_file_path
        self.__init_database()
        
        if not self.init_tables():
            self.create_tables()
        


    def __init_database(self):    
        """
        creates the sqlite database instance and checks if the database exists in biodb.
        """
        database= create_database("sqlite:%s" % biodb_sql_db_path)
        print "Created storm database from %s." % biodb_sql_db_path
        self.store= Store(database)
        

    def init_tables(self):
        self.biodb_table = "biodb_ncbi"
        self.taxonomy_division_table = "biodb_ncbi_taxonomy_division"
        self.division_table = "biodb_ncbi_division"

        #### return 1 if the tables already exist in the database, 0 otherwise
        table_list = [table[0] for table in
                      self.store.execute('select tbl_name from SQLITE_MASTER')]

        return 0 if self.taxonomy_division_table not in table_list else 1


    def create_tables(self):
        self.create_taxonomy_division_string = (
            'CREATE TABLE ' + self.taxonomy_division_table +
            ' (taxonID INTEGER PRIMARY KEY, divisionID INTEGER,'
            ' FOREIGN KEY (taxonID) REFERENCES ' + self.biodb_table + '(id),'
            ' FOREIGN KEY (divisionID) REFERENCES ' + self.division_table +
            '(id))')

        self.create_division_string = (
            'CREATE TABLE ' + self.division_table +
            ' (id INTEGER PRIMARY KEY, name VARCHAR)')

        self.store.execute(self.create_taxonomy_division_string)
        self.store.execute(self.create_division_string)

    def insert_division(self, div_id, name):
        div = NCBIDivision()
        div.id = int(div_id)
        div.name = unicode(name)

        self.store.add(div)


    def insert_taxonomy_division(self, taxon_id, div_id):
        n_tax_div = NCBITaxonomyDivision()
        n_tax_div.taxonID = int(taxon_id)
        n_tax_div.divisionID = int(div_id)

        self.store.add(n_tax_div)


    def insert_divisions_from_file(self):
        with open(self.divisions_file_path) as div_file:
            for line in div_file:
                cols = line.rstrip('\n').split(sep)
                div_id = cols[0]
                name = cols[2]
                self.insert_division(div_id, name)

        self.store.commit()

    def insert_taxonomy_divisions_from_file(self):
        i = 0
        with open(self.taxonomy_divisions_file_path) as tax_div_file:
            for line in tax_div_file:
                cols = line.rstrip('\n').split(sep)

                division_id = int(cols[4].strip())

                if division_id in self.included_divisions:
                    tax_id = cols[0].strip()
                    self.insert_taxonomy_division(tax_id, division_id)

                    self.store.commit()

                    i += 1
                    if i % 10000 == 0:
                        print "%d taxa inserted!" % i
Example No. 32
			id = self.id,
			name = self.name,
			age = self.age
		)

	def __repr__( self ):
		return '<Person (name=%s, age=%s)>' % (
			self.name,
			self.age,
		)

database = create_database('sqlite:')  # in-memory SQLite database
store = Store(database)
store.execute('''CREATE TABLE person (
	id INTEGER PRIMARY KEY,
	name VARCHAR,
	age INTEGER
)''')

persons = [
	Person( name=u'Amy', age=52 ),
	Person( name=u'Bob', age=48 ),
	Person( name=u'Cat', age=23 ),
	Person( name=u'Dan', age=17 ),
	Person( name=u'Edd', age=77 ),
	Person( name=u'Fan', age=65 ),
	Person( name=u'Gin', age=27 ),
	Person( name=u'Hil', age=30 ),
	Person( name=u'Iri', age=62 ),
	Person( name=u'Jac', age=18 )
]
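
The excerpt stops after building the list; a minimal continuation (assuming the Person class sketched in the fragment above) would persist the rows and read one back:

# persist the rows, then read one back
for person in persons:
	store.add(person)
store.commit()

amy = store.find(Person, Person.name == u'Amy').one()
print amy  # -> <Person (name=Amy, age=52)>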
Example No. 33
class PatchTest(MockerTestCase):

    def setUp(self):
        super(PatchTest, self).setUp()

        self.patchdir = self.makeDir()
        self.pkgdir = os.path.join(self.patchdir, "mypackage")
        os.makedirs(self.pkgdir)

        f = open(os.path.join(self.pkgdir, "__init__.py"), "w")
        f.write("shared_data = []")
        f.close()

        # Order of creation here is important to try to screw up the
        # patch ordering, as os.listdir returns in order of mtime (or
        # something).
        for pname, data in [("patch_380.py", patch_test_1),
                            ("patch_42.py", patch_test_0)]:
            self.add_module(pname, data)

        sys.path.append(self.patchdir)

        self.filename = self.makeFile()
        self.uri = "sqlite:///%s" % self.filename
        self.store = Store(create_database(self.uri))

        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")

        self.assertFalse(self.store.get(Patch, (42)))
        self.assertFalse(self.store.get(Patch, (380)))

        import mypackage
        self.mypackage = mypackage

        # Create another connection just to keep track of the state of the
        # whole transaction manager.  See the assertion functions below.
        self.another_store = Store(create_database("sqlite:"))
        self.another_store.execute("CREATE TABLE test (id INT)")
        self.another_store.commit()
        self.prepare_for_transaction_check()

        class Committer(object):

            def commit(committer):
                self.store.commit()
                self.another_store.commit()

            def rollback(committer):
                self.store.rollback()
                self.another_store.rollback()

        self.committer = Committer()
        self.patch_applier = PatchApplier(self.store, self.mypackage,
                                          self.committer)

    def tearDown(self):
        super(PatchTest, self).tearDown()
        self.committer.rollback()
        sys.path.remove(self.patchdir)
        for name in list(sys.modules):
            if name == "mypackage" or name.startswith("mypackage."):
                del sys.modules[name]

    def add_module(self, module_filename, contents):
        filename = os.path.join(self.pkgdir, module_filename)
        file = open(filename, "w")
        file.write(contents)
        file.close()

    def remove_all_modules(self):
        for filename in os.listdir(self.pkgdir):
            os.unlink(os.path.join(self.pkgdir, filename))

    def prepare_for_transaction_check(self):
        self.another_store.execute("DELETE FROM test")
        self.another_store.execute("INSERT INTO test VALUES (1)")

    def assert_transaction_committed(self):
        self.another_store.rollback()
        result = self.another_store.execute("SELECT * FROM test").get_one()
        self.assertEquals(result, (1,),
                          "Transaction manager wasn't committed.")

    def assert_transaction_aborted(self):
        self.another_store.commit()
        result = self.another_store.execute("SELECT * FROM test").get_one()
        self.assertEquals(result, None,
                          "Transaction manager wasn't aborted.")

    def test_apply(self):
        """
        L{PatchApplier.apply} executes the patch with the given version.
        """
        self.patch_applier.apply(42)

        x = getattr(self.mypackage, "patch_42").x
        self.assertEquals(x, 42)
        self.assertTrue(self.store.get(Patch, (42)))
        self.assertTrue("mypackage.patch_42" in sys.modules)

        self.assert_transaction_committed()

    def test_apply_all(self):
        """
        L{PatchApplier.apply_all} executes all unapplied patches.
        """
        self.patch_applier.apply_all()

        self.assertTrue("mypackage.patch_42" in sys.modules)
        self.assertTrue("mypackage.patch_380" in sys.modules)

        x = getattr(self.mypackage, "patch_42").x
        y = getattr(self.mypackage, "patch_380").y

        self.assertEquals(x, 42)
        self.assertEquals(y, 380)

        self.assert_transaction_committed()

    def test_apply_exploding_patch(self):
        """
        L{PatchApplier.apply} aborts the transaction if the patch fails.
        """
        self.remove_all_modules()
        self.add_module("patch_666.py", patch_explosion)
        self.assertRaises(StormError, self.patch_applier.apply, 666)

        self.assert_transaction_aborted()

    def test_wb_apply_all_exploding_patch(self):
        """
        When a patch explodes the store is rolled back to make sure
        that any changes the patch made to the database are removed.
        Any other patches that have been applied successfully before
        it should not be rolled back.  Any patches pending after the
        exploding patch should remain unapplied.
        """
        self.add_module("patch_666.py", patch_explosion)
        self.add_module("patch_667.py", patch_after_explosion)
        self.assertEquals(list(self.patch_applier._get_unapplied_versions()),
                          [42, 380, 666, 667])
        self.assertRaises(StormError, self.patch_applier.apply_all)
        self.assertEquals(list(self.patch_applier._get_unapplied_versions()),
                          [666, 667])

    def test_mark_applied(self):
        """
        L{PatchApplier.mark_applied} marks a patch as applied by inserting a
        new row in the patch table.
        """
        self.patch_applier.mark_applied(42)

        self.assertFalse("mypackage.patch_42" in sys.modules)
        self.assertFalse("mypackage.patch_380" in sys.modules)

        self.assertTrue(self.store.get(Patch, 42))
        self.assertFalse(self.store.get(Patch, 380))

        self.assert_transaction_committed()

    def test_mark_applied_all(self):
        """
        L{PatchApplier.mark_applied_all} marks all pending patches as applied.
        """
        self.patch_applier.mark_applied_all()

        self.assertFalse("mypackage.patch_42" in sys.modules)
        self.assertFalse("mypackage.patch_380" in sys.modules)

        self.assertTrue(self.store.get(Patch, 42))
        self.assertTrue(self.store.get(Patch, 380))

        self.assert_transaction_committed()

    def test_application_order(self):
        """
        L{PatchApplier.apply_all} applies the patches in increasing version
        order.
        """
        self.patch_applier.apply_all()
        self.assertEquals(self.mypackage.shared_data,
                          [42, 380])

    def test_has_pending_patches(self):
        """
        L{PatchApplier.has_pending_patches} returns C{True} if there are
        patches to be applied, C{False} otherwise.
        """
        self.assertTrue(self.patch_applier.has_pending_patches())
        self.patch_applier.apply_all()
        self.assertFalse(self.patch_applier.has_pending_patches())

    def test_abort_if_unknown_patches(self):
        """
        L{PatchApplier.apply_all} raises an error if the patch table
        contains patches without a matching file in the patch module.
        """
        self.patch_applier.mark_applied(381)
        self.assertRaises(UnknownPatchError, self.patch_applier.apply_all)

    def test_get_unknown_patch_versions(self):
        """
        L{PatchApplier.get_unknown_patch_versions} returns the versions of
        patches applied in the database that have no matching file in the
        patch module.
        """
        patches = [Patch(42), Patch(380), Patch(381)]
        my_store = MockPatchStore("database", patches=patches)
        patch_applier = PatchApplier(my_store, self.mypackage)
        self.assertEqual(set([381]),
                         patch_applier.get_unknown_patch_versions())

    def test_no_unknown_patch_versions(self):
        """
        L{PatchApplier.get_unknown_patch_versions} returns an empty set if
        every patch in the patch table has a matching file.
        """
        patches = [Patch(42), Patch(380)]
        my_store = MockPatchStore("database", patches=patches)
        patch_applier = PatchApplier(my_store, self.mypackage)
        self.assertEqual(set(), patch_applier.get_unknown_patch_versions())

    def test_patch_with_incorrect_apply(self):
        """
        L{PatchApplier.apply_all} raises an error as soon as one of the patches
        to be applied fails.
        """
        self.add_module("patch_999.py", patch_no_args_apply)
        try:
            self.patch_applier.apply_all()
        except BadPatchError, e:
            self.assertTrue("mypackage/patch_999.py" in str(e))
            self.assertTrue("takes no arguments" in str(e))
            self.assertTrue("TypeError" in str(e))
        else:
            self.fail("BadPatchError not raised")
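
The patch_test_0 and patch_test_1 fixtures written in setUp are not shown in this excerpt; given the assertions above (apply() sets a module attribute x, and apply_all() leaves shared_data == [42, 380]), a plausible reconstruction of patch_test_0 is:

# plausible reconstruction of the patch_test_0 fixture (an assumption
# inferred from the assertions, not the original source)
patch_test_0 = """
x = None

def apply(store):
    global x
    x = 42
    from mypackage import shared_data
    shared_data.append(42)
"""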
Example No. 34
class ZStormResourceManagerTest(TestHelper):

    def is_supported(self):
        return has_zope and has_testresources

    def setUp(self):
        super(ZStormResourceManagerTest, self).setUp()
        self._package_dir = self.makeDir()
        sys.path.append(self._package_dir)
        patch_dir = os.path.join(self._package_dir, "patch_package")
        os.mkdir(patch_dir)
        self.makeFile(path=os.path.join(patch_dir, "__init__.py"), content="")
        self.makeFile(path=os.path.join(patch_dir, "patch_1.py"),
                      content=PATCH)
        import patch_package
        create = ["CREATE TABLE test (foo TEXT UNIQUE, bar INT)"]
        drop = ["DROP TABLE test"]
        delete = ["DELETE FROM test"]
        schema = ZSchema(create, drop, delete, patch_package)
        uri = "sqlite:///%s" % self.makeFile()
        self.resource = ZStormResourceManager({"test": (uri, schema)})
        self.store = Store(create_database(uri))

    def tearDown(self):
        del sys.modules["patch_package"]
        sys.path.remove(self._package_dir)
        if "patch_1" in sys.modules:
            del sys.modules["patch_1"]
        super(ZStormResourceManagerTest, self).tearDown()

    def test_make(self):
        """
        L{ZStormResourceManager.make} returns a L{ZStorm} resource that can be
        used to get the registered L{Store}s.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo, bar FROM test")))

    def test_make_upgrade(self):
        """
        L{ZStormResourceManager.make} upgrades the schema if needed.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT bar FROM test")))

    def test_make_delete(self):
        """
        L{ZStormResourceManager.make} deletes the data from all tables to make
        sure that tests run against a clean database.
        """
        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")
        self.store.execute("CREATE TABLE test (foo TEXT)")
        self.store.execute("INSERT INTO test (foo) VALUES ('data')")
        self.store.commit()
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        self.assertEqual([], list(store.execute("SELECT foo FROM test")))

    def test_clean_flush(self):
        """
        L{ZStormResourceManager.clean} tries to flush the stores to make sure
        that they are all in a consistent state.
        """

        class Test(object):
            __storm_table__ = "test"
            foo = Unicode()
            bar = Int(primary=True)

            def __init__(self, foo, bar):
                self.foo = foo
                self.bar = bar

        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.add(Test(u"data", 1))
        store.add(Test(u"data", 2))
        self.assertRaises(IntegrityError, self.resource.clean, zstorm)

    def test_clean_delete(self):
        """
        L{ZStormResourceManager.clean} cleans the database tables from the data
        created by the tests.
        """
        zstorm = self.resource.make([])
        store = zstorm.get("test")
        store.execute("INSERT INTO test (foo, bar) VALUES ('data', 123)")
        store.commit()
        self.resource.clean(zstorm)
        self.assertEqual([], list(self.store.execute("SELECT * FROM test")))
Example No. 35
def db_update_schema(database):
    """
        Check for pending database schema updates.
        If any are found, apply them and bump the version.
    """

    # Connect to the database
    db_store = Store(database)

    # Check if the DB schema has been loaded
    db_exists = False
    try:
        db_store.execute(Select(DBSchema.version))
        db_exists = True
    except Exception:
        db_store.rollback()
        logging.debug("Failed to query schema table.")

    if not db_exists:
        logging.info("Creating database")
        schema_file = sorted(glob.glob("schema/schema-*.sql"))[-1]
        schema_version = schema_file.split(".")[0].split("-")[-1]
        logging.debug("Using '%s' to deploy schema '%s'" % (schema_file,
                                                            schema_version))
        with open(schema_file, "r") as fd:
            try:
                for line in fd.read().replace("\n", "").split(";"):
                    if not line:
                        continue
                    db_store.execute("%s;" % line)
                    db_commit(db_store)
                logging.info("Database created")
            except Exception:
                logging.critical("Failed to initialize the database")
                return False

    # Get schema version
    version = db_store.execute(Select(Max(DBSchema.version))).get_one()[0]
    if not version:
        logging.critical("No schema version.")
        return False

    # Apply updates
    for update_file in sorted(glob.glob("schema/update-*.sql")):
        update_version = update_file.split(".")[0].split("-")[-1]
        if int(update_version) > version:
            logging.info("Using '%s' to deploy update '%s'" % (update_file,
                                                               update_version))
            with open(update_file, "r") as fd:
                try:
                    for line in fd.read().replace("\n", "").split(";"):
                        if not line:
                            continue
                        db_store.execute("%s;" % line)
                        db_commit(db_store)
                except Exception:
                    logging.critical("Failed to load schema update")
                    return False

    # Get schema version
    new_version = db_store.execute(Select(Max(DBSchema.version))).get_one()[0]
    if new_version > version:
        logging.info("Database schema successfuly updated from '%s' to '%s'" %
                     (version, new_version))

    db_store.close()
    return True
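
db_commit() is called above but not defined in this excerpt; a minimal sketch of such a helper (an assumption, not the original) would be:

def db_commit(db_store):
    # hypothetical helper: commit, rolling back on failure so a broken
    # statement doesn't leave the store in a wedged transaction
    try:
        db_store.commit()
    except Exception:
        db_store.rollback()
        raise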
Example No. 36
class PatchApplierTest(MockerTestCase):
    def setUp(self):
        super(PatchApplierTest, self).setUp()

        self.patchdir = self.makeDir()
        self.pkgdir = os.path.join(self.patchdir, "mypackage")
        os.makedirs(self.pkgdir)

        f = open(os.path.join(self.pkgdir, "__init__.py"), "w")
        f.write("shared_data = []")
        f.close()

        # Order of creation here is important to try to screw up the
        # patch ordering, as os.listdir returns in order of mtime (or
        # something).
        for pname, data in [("patch_380.py", patch_test_1),
                            ("patch_42.py", patch_test_0)]:
            self.add_module(pname, data)

        sys.path.append(self.patchdir)

        self.filename = self.makeFile()
        self.uri = "sqlite:///%s" % self.filename
        self.store = Store(create_database(self.uri))

        self.store.execute("CREATE TABLE patch "
                           "(version INTEGER NOT NULL PRIMARY KEY)")

        self.assertFalse(self.store.get(Patch, (42)))
        self.assertFalse(self.store.get(Patch, (380)))

        import mypackage
        self.mypackage = mypackage
        self.patch_set = PatchSet(mypackage)

        # Create another connection just to keep track of the state of the
        # whole transaction manager.  See the assertion functions below.
        self.another_store = Store(create_database("sqlite:"))
        self.another_store.execute("CREATE TABLE test (id INT)")
        self.another_store.commit()
        self.prepare_for_transaction_check()

        class Committer(object):
            def commit(committer):
                self.store.commit()
                self.another_store.commit()

            def rollback(committer):
                self.store.rollback()
                self.another_store.rollback()

        self.committer = Committer()
        self.patch_applier = PatchApplier(self.store, self.patch_set,
                                          self.committer)

    def tearDown(self):
        super(PatchApplierTest, self).tearDown()
        self.committer.rollback()
        sys.path.remove(self.patchdir)
        for name in list(sys.modules):
            if name == "mypackage" or name.startswith("mypackage."):
                del sys.modules[name]

    def add_module(self, module_filename, contents):
        filename = os.path.join(self.pkgdir, module_filename)
        file = open(filename, "w")
        file.write(contents)
        file.close()

    def remove_all_modules(self):
        for filename in os.listdir(self.pkgdir):
            os.unlink(os.path.join(self.pkgdir, filename))

    def prepare_for_transaction_check(self):
        self.another_store.execute("DELETE FROM test")
        self.another_store.execute("INSERT INTO test VALUES (1)")

    def assert_transaction_committed(self):
        self.another_store.rollback()
        result = self.another_store.execute("SELECT * FROM test").get_one()
        self.assertEquals(result, (1, ),
                          "Transaction manager wasn't committed.")

    def assert_transaction_aborted(self):
        self.another_store.commit()
        result = self.another_store.execute("SELECT * FROM test").get_one()
        self.assertEquals(result, None, "Transaction manager wasn't aborted.")

    def test_apply(self):
        """
        L{PatchApplier.apply} executes the patch with the given version.
        """
        self.patch_applier.apply(42)

        x = getattr(self.mypackage, "patch_42").x
        self.assertEquals(x, 42)
        self.assertTrue(self.store.get(Patch, (42)))
        self.assertTrue("mypackage.patch_42" in sys.modules)

        self.assert_transaction_committed()

    def test_apply_with_patch_directory(self):
        """
        If the given L{PatchSet} uses sub-level patches, then the
        L{PatchApplier.apply} method will look at the per-patch directory and
        apply the relevant sub-level patch.
        """
        path = os.path.join(self.pkgdir, "patch_99")
        self.makeDir(path=path)
        self.makeFile(content="", path=os.path.join(path, "__init__.py"))
        self.makeFile(content=patch_test_0, path=os.path.join(path, "foo.py"))
        self.patch_set._sub_level = "foo"
        self.add_module("patch_99/foo.py", patch_test_0)
        self.patch_applier.apply(99)
        self.assertTrue(self.store.get(Patch, (99)))

    def test_apply_all(self):
        """
        L{PatchApplier.apply_all} executes all unapplied patches.
        """
        self.patch_applier.apply_all()

        self.assertTrue("mypackage.patch_42" in sys.modules)
        self.assertTrue("mypackage.patch_380" in sys.modules)

        x = getattr(self.mypackage, "patch_42").x
        y = getattr(self.mypackage, "patch_380").y

        self.assertEquals(x, 42)
        self.assertEquals(y, 380)

        self.assert_transaction_committed()

    def test_apply_exploding_patch(self):
        """
        L{PatchApplier.apply} aborts the transaction if the patch fails.
        """
        self.remove_all_modules()
        self.add_module("patch_666.py", patch_explosion)
        self.assertRaises(StormError, self.patch_applier.apply, 666)

        self.assert_transaction_aborted()

    def test_wb_apply_all_exploding_patch(self):
        """
        When a patch explodes the store is rolled back to make sure
        that any changes the patch made to the database are removed.
        Any other patches that have been applied successfully before
        it should not be rolled back.  Any patches pending after the
        exploding patch should remain unapplied.
        """
        self.add_module("patch_666.py", patch_explosion)
        self.add_module("patch_667.py", patch_after_explosion)
        self.assertEquals(list(self.patch_applier.get_unapplied_versions()),
                          [42, 380, 666, 667])
        self.assertRaises(StormError, self.patch_applier.apply_all)
        self.assertEquals(list(self.patch_applier.get_unapplied_versions()),
                          [666, 667])

    def test_mark_applied(self):
        """
        L{PatchApplier.mark_applied} marks a patch as applied by inserting a
        new row in the patch table.
        """
        self.patch_applier.mark_applied(42)

        self.assertFalse("mypackage.patch_42" in sys.modules)
        self.assertFalse("mypackage.patch_380" in sys.modules)

        self.assertTrue(self.store.get(Patch, 42))
        self.assertFalse(self.store.get(Patch, 380))

        self.assert_transaction_committed()

    def test_mark_applied_all(self):
        """
        L{PatchApplier.mark_applied_all} marks all pending patches as applied.
        """
        self.patch_applier.mark_applied_all()

        self.assertFalse("mypackage.patch_42" in sys.modules)
        self.assertFalse("mypackage.patch_380" in sys.modules)

        self.assertTrue(self.store.get(Patch, 42))
        self.assertTrue(self.store.get(Patch, 380))

        self.assert_transaction_committed()

    def test_application_order(self):
        """
        L{PatchApplier.apply_all} applies the patches in increasing version
        order.
        """
        self.patch_applier.apply_all()
        self.assertEquals(self.mypackage.shared_data, [42, 380])

    def test_has_pending_patches(self):
        """
        L{PatchApplier.has_pending_patches} returns C{True} if there are
        patches to be applied, C{False} otherwise.
        """
        self.assertTrue(self.patch_applier.has_pending_patches())
        self.patch_applier.apply_all()
        self.assertFalse(self.patch_applier.has_pending_patches())

    def test_abort_if_unknown_patches(self):
        """
        L{PatchApplier.apply_all} raises an error if the patch table
        contains patches without a matching file in the patch module.
        """
        self.patch_applier.mark_applied(381)
        self.assertRaises(UnknownPatchError, self.patch_applier.apply_all)

    def test_get_unknown_patch_versions(self):
        """
        L{PatchApplier.get_unknown_patch_versions} returns the versions of
        patches applied in the database that have no matching file in the
        patch module.
        """
        patches = [Patch(42), Patch(380), Patch(381)]
        my_store = MockPatchStore("database", patches=patches)
        patch_applier = PatchApplier(my_store, self.mypackage)
        self.assertEqual(set([381]),
                         patch_applier.get_unknown_patch_versions())

    def test_no_unknown_patch_versions(self):
        """
        L{PatchApplier.get_unknown_patch_versions} returns an empty set if
        every patch in the patch table has a matching file.
        """
        patches = [Patch(42), Patch(380)]
        my_store = MockPatchStore("database", patches=patches)
        patch_applier = PatchApplier(my_store, self.mypackage)
        self.assertEqual(set(), patch_applier.get_unknown_patch_versions())

    def test_patch_with_incorrect_apply(self):
        """
        L{PatchApplier.apply_all} raises an error as soon as one of the patches
        to be applied fails.
        """
        self.add_module("patch_999.py", patch_no_args_apply)
        try:
            self.patch_applier.apply_all()
        except BadPatchError as e:
            self.assertTrue("mypackage/patch_999.py" in str(e))
            self.assertTrue("takes no arguments" in str(e))
            self.assertTrue("TypeError" in str(e))
        else:
            self.fail("BadPatchError not raised")

    def test_patch_with_missing_apply(self):
        """
        L{PatchApplier.apply_all} raises an error if one of the patches
        to be applied has no 'apply' function defined.
        """
        self.add_module("patch_999.py", patch_missing_apply)
        try:
            self.patch_applier.apply_all()
        except BadPatchError as e:
            self.assertTrue("mypackage/patch_999.py" in str(e))
            self.assertTrue("no attribute" in str(e))
            self.assertTrue("AttributeError" in str(e))
        else:
            self.fail("BadPatchError not raised")

    def test_patch_with_syntax_error(self):
        """
        L{PatchApplier.apply_all} raises an error if one of the patches
        to be applied contains a syntax error.
        """
        self.add_module("patch_999.py", "that's not python")
        try:
            self.patch_applier.apply_all()
        except BadPatchError as e:
            self.assertTrue(" 999 " in str(e))
            self.assertTrue("SyntaxError" in str(e))
        else:
            self.fail("BadPatchError not raised")

    def test_patch_error_includes_traceback(self):
        """
        The exception raised by L{PatchApplier.apply_all} when a patch fails
        includes the relevant traceback from the patch.
        """
        self.add_module("patch_999.py", patch_name_error)
        try:
            self.patch_applier.apply_all()
        except BadPatchError as e:
            self.assertTrue("mypackage/patch_999.py" in str(e))
            self.assertTrue("NameError" in str(e))
            self.assertTrue("blah" in str(e))
            formatted = traceback.format_exc()
            self.assertTrue("# Comment" in formatted)
        else:
            self.fail("BadPatchError not raised")
Example No. 37
class TableReplacer(object):
    """
    This is the base class used by every Updater
    """

    def __init__(self, table_history, old_db_file, new_db_file, start_ver):
        self.table_history = table_history
        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        for k, v in table_history.iteritems():
            length = DATABASE_VERSION + 1 - FIRST_DATABASE_VERSION_SUPPORTED
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSettings.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            log.msg('{} Acquire SQL schema {}'.format(self.debug_info, GLSettings.db_schema_file))

            if not os.access(GLSettings.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSettings.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSettings.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        log.msg('OperationalError in "{}"'.format(create_query))
            self.store_new.commit()
            return
            # nothing more to do here: the latest schema has just been
            # created directly from the schema file

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(self.debug_info, create_query))
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        pass

    def epilogue(self):
        pass

    def get_right_model(self, table_name, version):
        table_index = (version - FIRST_DATABASE_VERSION_SUPPORTED)

        if table_name not in self.table_history:
            msg = 'Not implemented usage of get_right_model {} ({} {})'.format(
                __file__, table_name, self.start_ver)
            raise NotImplementedError(msg)

        if version > DATABASE_VERSION:
            raise ValueError('Version supplied must be less or equal to {}'.format(
                DATABASE_VERSION))

        if self.table_history[table_name][table_index]:
            return self.table_history[table_name][table_index]

        # otherwise it is None, so fall back to the most recent valid version
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # This should never happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name: the name of the model/table to look up
        @param version: the database version of interest
        @return:
            The CREATE query for the model at that version, or None if
            the table does not exist in that version.
        """

        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):
        objs_count = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)
        ).count()
        log.msg('{} default {} migration assistant: #{}'.format(
            self.debug_info, table_name, objs_count))

        old_objects = self.store_old.find(self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # copy each column across using Storm's internal column map
            for _, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        log.msg('{} default {} migration assistant'.format(self.debug_info, table_name))

        old_obj = self.store_old.find(self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # copy each column across using Storm's internal column map
        for _, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        Stats was created between versions 14 and 15 and has not been
        migrated since version 17.
        """
        if self.start_ver < 17:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        There is no need to migrate the application data.
        Default application data is loaded by the application
        and stored in the db at each new start.
        """
        return

    def migrate_Field(self):
        """
        Field was created between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Field")

    def migrate_FieldAttr(self):
        """
        FieldAttr was created between versions 22 and 23.
        """
        if self.start_ver < 23:
            return

        self._perform_copy_list("FieldAttr")

    def migrate_FieldOption(self):
        """
        FieldOption was created between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldOption")

    def migrate_FieldField(self):
        """
        FieldField was created between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldField")

    def migrate_Step(self):
        """
        Step was created between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Step")

    def migrate_StepField(self):
        """
        StepField was created between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("StepField")

    def migrate_Anomalies(self):
        """
        Anomalies was created between versions 14 and 15 and was
        dropped before version 22, so earlier versions are not migrated.
        """
        if self.start_ver < 23:
            return

        self._perform_copy_list("Anomalies")

    def migrate_EventLogs(self):
        """
        EventLogs was created between versions 15 and 16 and was
        dropped before version 20, so earlier versions are not migrated.
        """
        if self.start_ver < 20:
            return

        self._perform_copy_list("EventLogs")
Exemplo n.º 38
0
class SchemaTest(MockerTestCase):

    def setUp(self):
        super(SchemaTest, self).setUp()
        self.database = create_database("sqlite:///%s" % self.makeFile())
        self.store = Store(self.database)

        self._package_dirs = set()
        self._package_names = set()
        self.package = self.create_package(self.makeDir(), "patch_package")
        import patch_package

        creates = ["CREATE TABLE person (id INTEGER, name TEXT)"]
        drops = ["DROP TABLE person"]
        deletes = ["DELETE FROM person"]

        self.schema = Schema(creates, drops, deletes, patch_package)

    def tearDown(self):
        for package_dir in self._package_dirs:
            sys.path.remove(package_dir)

        for name in list(sys.modules):
            if name in self._package_names:
                del sys.modules[name]
            elif filter(
                None,
                [name.startswith("%s." % x) for x in self._package_names]):
                del sys.modules[name]

        super(SchemaTest, self).tearDown()

    def create_package(self, base_dir, name, init_module=None):
        """Create a Python package.

        Packages created using this method will be removed from L{sys.path}
        and L{sys.modules} during L{tearDown}.

        @param base_dir: The directory in which to create the new package.
        @param name: The name of the package.
        @param init_module: Optionally, the text to include in the __init__.py
            file.
        @return: A L{Package} instance that can be used to create modules.
        """
        package_dir = os.path.join(base_dir, name)
        self._package_names.add(name)
        os.makedirs(package_dir)

        file = open(os.path.join(package_dir, "__init__.py"), "w")
        if init_module:
            file.write(init_module)
        file.close()
        sys.path.append(base_dir)
        self._package_dirs.add(base_dir)

        return Package(package_dir, name)

    def test_create(self):
        """
        L{Schema.create} can be used to create the tables of a L{Store}.
        """
        self.assertRaises(StormError,
                          self.store.execute, "SELECT * FROM person")
        self.schema.create(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])

    def test_drop(self):
        """
        L{Schema.drop} can be used to drop the tables of a L{Store}.
        """
        self.schema.create(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])
        self.schema.drop(self.store)
        self.assertRaises(StormError,
                          self.store.execute, "SELECT * FROM person")

    def test_delete(self):
        """
        L{Schema.delete} can be used to clear the tables of a L{Store}.
        """
        self.schema.create(self.store)
        self.store.execute("INSERT INTO person (id, name) VALUES (1, 'Jane')")
        self.assertEquals(list(self.store.execute("SELECT * FROM person")),
                          [(1, u"Jane")])
        self.schema.delete(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])

    def test_upgrade_creates_schema(self):
        """
        L{Schema.upgrade} creates a schema from scratch if none exists, and
        is effectively equivalent to L{Schema.create} in that case.
        """
        self.assertRaises(StormError,
                          self.store.execute, "SELECT * FROM person")
        self.schema.upgrade(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])

    def test_upgrade_marks_patches_applied(self):
        """
        L{Schema.upgrade} updates the patch table after applying the needed
        patches.
        """
        contents = """
def apply(store):
    store.execute('ALTER TABLE person ADD COLUMN phone TEXT')
"""
        self.package.create_module("patch_1.py", contents)
        statement = "SELECT * FROM patch"
        self.assertRaises(StormError, self.store.execute, statement)
        self.schema.upgrade(self.store)
        self.assertEquals(list(self.store.execute("SELECT * FROM patch")),
                          [(1,)])

    def test_upgrade_applies_patches(self):
        """
        L{Schema.upgrade} executes the needed patches, that typically modify
        the existing schema.
        """
        self.schema.create(self.store)
        contents = """
def apply(store):
    store.execute('ALTER TABLE person ADD COLUMN phone TEXT')
"""
        self.package.create_module("patch_1.py", contents)
        self.schema.upgrade(self.store)
        self.store.execute(
            "INSERT INTO person (id, name, phone) VALUES (1, 'Jane', '123')")
        self.assertEquals(list(self.store.execute("SELECT * FROM person")),
                          [(1, u"Jane", u"123")])
Exemplo n.º 39
0
class IssuesLog():
    def __init__(self, backend_name):
        self.backend_name = backend_name
        self.connect()
        self.create_db()

    def connect(self):
        opts = Config()

        self.database = create_database('mysql://' + opts.db_user_out + ':' +
                                        opts.db_password_out + '@' +
                                        opts.db_hostname_out + ':' +
                                        opts.db_port_out + '/' +
                                        opts.db_database_out)
        self.store = Store(self.database)

    def create_db(self):
        print("self.backend_name = %s" % (self.backend_name))
        if self.backend_is_bugzilla():
            self.store.execute(__sql_table_bugzilla__)
        elif self.backend_is_jira():
            self.store.execute(__sql_table_jira__)

    def copy_issue(self, db_ilog):
        """
        This method creates a copy of a DBBugzillaIssuesLog/DBJiraIssuesLog object
        """

        if self.backend_is_bugzilla():
            aux = DBBugzillaIssuesLog(db_ilog.issue, db_ilog.tracker_id)
            aux.issue_id = db_ilog.issue_id
            aux.type = db_ilog.type
            aux.summary = db_ilog.summary
            aux.description = db_ilog.description
            aux.status = db_ilog.status
            aux.resolution = db_ilog.resolution
            aux.priority = db_ilog.priority
            aux.submitted_by = db_ilog.submitted_by
            aux.date = db_ilog.date
            aux.assigned_to = db_ilog.assigned_to

            #aux = DBBugzillaIssuesLog (db_ilog.issue_id)
            aux.alias = db_ilog.alias
            aux.delta_ts = db_ilog.delta_ts
            aux.reporter_accessible = db_ilog.reporter_accessible
            aux.cclist_accessible = db_ilog.cclist_accessible
            aux.classification_id = db_ilog.classification_id
            aux.classification = db_ilog.classification
            aux.product = db_ilog.product
            aux.component = db_ilog.component
            aux.version = db_ilog.version
            aux.rep_platform = db_ilog.rep_platform
            aux.op_sys = db_ilog.op_sys
            aux.dup_id = db_ilog.dup_id
            aux.bug_file_loc = db_ilog.bug_file_loc
            aux.status_whiteboard = db_ilog.status_whiteboard
            aux.target_milestone = db_ilog.target_milestone
            aux.votes = db_ilog.votes
            aux.everconfirmed = db_ilog.everconfirmed
            aux.qa_contact = db_ilog.qa_contact
            aux.estimated_time = db_ilog.estimated_time
            aux.remaining_time = db_ilog.remaining_time
            aux.actual_time = db_ilog.actual_time
            aux.deadline = db_ilog.deadline
            aux.keywords = db_ilog.keywords
            aux.cc = db_ilog.cc
            aux.group_bugzilla = db_ilog.group_bugzilla
            aux.flag = db_ilog.flag
            return aux

        elif self.backend_is_jira():
            aux = DBJiraIssuesLog(db_ilog.issue, db_ilog.tracker_id)
            aux.issue_id = db_ilog.issue_id
            aux.type = db_ilog.type
            aux.summary = db_ilog.summary
            aux.description = db_ilog.description
            aux.status = db_ilog.status
            aux.resolution = db_ilog.resolution
            aux.priority = db_ilog.priority
            aux.submitted_by = db_ilog.submitted_by
            aux.date = db_ilog.date
            aux.assigned_to = db_ilog.assigned_to

            aux.link = db_ilog.link
            aux.component = db_ilog.component
            aux.version = db_ilog.version
            aux.issue_key = db_ilog.issue_key
            aux.environment = db_ilog.environment
            aux.project = db_ilog.project
            aux.project_key = db_ilog.project_key
            aux.security = db_ilog.security

            return aux

    def get_people_id(self, email, tracker_id):
        """
        Gets the id of a user
        """
        p = self.store.find(DBPeople, DBPeople.email == email).one()
        ##
        ## the code below was created ad-hoc for KDE solid
        ##
        try:
            return p.id
        except AttributeError:
            p = self.store.find(DBPeople, DBPeople.user_id == email).one()
            try:
                return p.id
            except AttributeError:
                # no person was found in People with the email above, so
                # we include it
                printdbg("Person not found. Inserted with email %s " % (email))
                dp = DBPeople(email, tracker_id)
                self.store.add(dp)
                self.store.commit()
                return dp.id

    def get_last_change_date(self):
        """
        This method gets the date of the last change included in the log table
        """
        if self.backend_is_bugzilla():
            result = self.store.find(DBBugzillaIssuesLog)
            aux = result.order_by(Desc(DBBugzillaIssuesLog.date))[:1]
            for entry in aux:
                return entry.date
        elif self.backend_is_jira():
            result = self.store.find(DBJiraIssuesLog)
            aux = result.order_by(Desc(DBJiraIssuesLog.date))[:1]
            for entry in aux:
                return entry.date
        return None

    def get_issues_changed_since(self, date):
        """
        This method fetches the issues changed since the given date
        """

        #SELECT DISTINCT(issues.id) FROM issues, changes
        #WHERE issues.id = changes.issue_id
        #AND (issues.submitted_on >= '2012-02-28 12:34:44'
        #    OR changes.changed_on >= '2012-02-28 12:34:44');

        result = self.store.find(
            DBIssue, DBChange.issue_id == DBIssue.id,
            Or(DBIssue.submitted_on > date,
               DBChange.changed_on > date)).group_by(DBIssue.id)

        return result

    def get_previous_state(self, issue_id):
        """
        This method returns a db_ilog object with the last row found in
        the log table
        """
        db_ilog = None
        if self.backend_is_jira():
            rows = self.store.find(DBJiraIssuesLog,
                                   DBJiraIssuesLog.issue_id == issue_id)
            lrow = rows.order_by(Desc(DBJiraIssuesLog.id))[:1]
            for aux in lrow:  # FIXME it only contains an element!
                db_ilog = DBJiraIssuesLog(aux.issue, aux.tracker_id)
                db_ilog.issue_id = aux.issue_id
                db_ilog.type = aux.type
                db_ilog.summary = aux.summary
                db_ilog.description = aux.description
                db_ilog.status = aux.status
                db_ilog.resolution = aux.resolution
                db_ilog.priority = aux.priority
                db_ilog.submitted_by = aux.submitted_by
                db_ilog.date = aux.date
                db_ilog.assigned_to = aux.assigned_to
                db_ilog.issue_key = aux.issue_key
                db_ilog.link = aux.link
                db_ilog.environment = aux.environment
                db_ilog.security = aux.security
                db_ilog.updated = aux.updated
                db_ilog.version = aux.version
                db_ilog.component = aux.component
                db_ilog.votes = aux.votes
                db_ilog.project = aux.project
                db_ilog.project_id = aux.project_id
                db_ilog.project_key = aux.project_key
        else:  # elif self.backend_is_bugzilla():
            rows = self.store.find(DBBugzillaIssuesLog,
                                   DBBugzillaIssuesLog.issue_id == issue_id)
            lrow = rows.order_by(Desc(DBBugzillaIssuesLog.id))[:1]
            for aux in lrow:  # FIXME it only contains an element!
                db_ilog = DBBugzillaIssuesLog(aux.issue, aux.tracker_id)
                db_ilog.issue_id = aux.issue_id
                db_ilog.type = aux.type
                db_ilog.summary = aux.summary
                db_ilog.description = aux.description
                db_ilog.status = aux.status
                db_ilog.resolution = aux.resolution
                db_ilog.priority = aux.priority
                db_ilog.submitted_by = aux.submitted_by
                db_ilog.date = aux.date
                db_ilog.assigned_to = aux.assigned_to
                db_ilog.alias = aux.alias
                db_ilog.delta_ts = aux.delta_ts
                db_ilog.reporter_accessible = aux.reporter_accessible
                db_ilog.cclist_accessible = aux.cclist_accessible
                db_ilog.classification_id = aux.classification_id
                db_ilog.classification = aux.classification
                db_ilog.product = aux.product
                db_ilog.component = aux.component
                db_ilog.version = aux.version
                db_ilog.rep_platform = aux.rep_platform
                db_ilog.op_sys = aux.op_sys
                db_ilog.dup_id = aux.dup_id
                db_ilog.bug_file_loc = aux.bug_file_loc
                db_ilog.status_whiteboard = aux.status_whiteboard
                db_ilog.target_milestone = aux.target_milestone
                db_ilog.votes = aux.votes
                db_ilog.everconfirmed = aux.everconfirmed
                db_ilog.qa_contact = aux.qa_contact
                db_ilog.estimated_time = aux.estimated_time
                db_ilog.remaining_time = aux.remaining_time
                db_ilog.actual_time = aux.actual_time
                db_ilog.deadline = aux.deadline
                db_ilog.keywords = aux.keywords
                db_ilog.cc = aux.cc
                db_ilog.group_bugzilla = aux.group_bugzilla
                db_ilog.flag = aux.flag

        return db_ilog

    def issue_is_new(self, issue_id):
        """
        This method returns True if the issue is not logged in the log table
        """
        if self.backend_is_jira():
            result = self.store.find(DBJiraIssuesLog,
                                     DBJiraIssuesLog.issue_id == issue_id)
        elif self.backend_is_bugzilla():
            result = self.store.find(DBBugzillaIssuesLog,
                                     DBBugzillaIssuesLog.issue_id == issue_id)
        return (result.count() == 0)

    def build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in
        order to get the initial state of the bug
        """
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes\
        where issue_id=%s" % (db_ilog.issue_id))

        for f in fields:
            value = self.store.execute("SELECT old_value FROM changes \
            WHERE issue_id=%s AND field=\"%s\" ORDER BY changed_on LIMIT 1" %
                                       (db_ilog.issue_id, f[0]))
            for v in value:
                if self.backend_is_bugzilla():
                    # Bugzilla section
                    #
                    if f[0] in bg_issues_links:
                        table_field = bg_issues_links[f[0]]
                        if table_field == 'summary':
                            db_ilog.summary = v[0]
                        elif table_field == 'priority':
                            db_ilog.priority = v[0]
                        elif table_field == 'type':
                            db_ilog.type = v[0]
                        elif table_field == 'assigned_to':
                            db_ilog.assigned_to = self.get_people_id(
                                v[0], self.get_tracker_id(db_ilog.issue_id))
                        elif table_field == 'status':
                            db_ilog.status = v[0]
                        elif table_field == 'resolution':
                            db_ilog.resolution = v[0]
                        elif table_field == 'alias':
                            db_ilog.alias = v[0]
                        elif table_field == 'reporter_accessible':
                            db_ilog.reporter_accessible = v[0]
                        elif table_field == 'cclist_accessible':
                            db_ilog.cclist_accessible = v[0]
                        elif table_field == 'product':
                            db_ilog.product = v[0]
                        elif table_field == 'component':
                            db_ilog.component = v[0]
                        elif table_field == 'version':
                            db_ilog.version = v[0]
                        elif table_field == 'rep_platform':
                            db_ilog.rep_platform = v[0]
                        elif table_field == 'op_sys':
                            db_ilog.op_sys = v[0]
                        elif table_field == 'bug_file_loc':
                            db_ilog.bug_file_loc = v[0]
                        elif table_field == 'status_whiteboard':
                            db_ilog.status_whiteboard = v[0]
                        elif table_field == 'target_milestone':
                            db_ilog.target_milestone = v[0]
                        elif table_field == 'votes':
                            db_ilog.votes = v[0]
                        elif table_field == 'everconfirmed':
                            db_ilog.everconfirmed = v[0]
                        elif table_field == 'qa_contact':
                            db_ilog.qa_contact = v[0]
                        elif table_field == 'keywords':
                            db_ilog.keywords = v[0]
                        elif table_field == 'cc':
                            db_ilog.cc = v[0]
                if self.backend_is_jira():
                    # Jira section
                    #
                    if f[0] in jira_issues_links:
                        table_field = jira_issues_links[f[0]]
                        if table_field == 'summary':
                            db_ilog.summary = v[0]
                        elif table_field == 'priority':
                            db_ilog.priority = v[0]
                        elif table_field == 'type':
                            db_ilog.type = v[0]
                        elif table_field == 'assigned_to':
                            db_ilog.assigned_to = self.get_people_id(
                                v[0], self.get_tracker_id(db_ilog.issue_id))
                        elif table_field == 'status':
                            db_ilog.status = v[0]
                        elif table_field == 'resolution':
                            db_ilog.resolution = v[0]
                        elif table_field == 'link':
                            db_ilog.link = v[0]
                        elif table_field == 'environment':
                            db_ilog.environment = v[0]
                        elif table_field == 'component':
                            db_ilog.component = v[0]
                        elif table_field == 'description':
                            db_ilog.description = v[0]
                        elif table_field == 'security':
                            db_ilog.security = v[0]

        return db_ilog

    def backend_is_bugzilla(self):
        return self.backend_name == 'bg'

    def backend_is_jira(self):
        return self.backend_name == 'jira'

    def get_last_values(self, issue_row):
        i = issue_row
        db_ilog = None
        if self.backend_is_bugzilla():
            db_ilog_bugzilla = DBBugzillaIssuesLog(i.issue, i.tracker_id)
            db_ilog_bugzilla.issue_id = i.id
            db_ilog_bugzilla.type = i.type
            db_ilog_bugzilla.summary = i.summary
            db_ilog_bugzilla.description = i.description
            db_ilog_bugzilla.status = i.status
            db_ilog_bugzilla.resolution = i.resolution
            db_ilog_bugzilla.priority = i.priority
            db_ilog_bugzilla.submitted_by = i.submitted_by
            db_ilog_bugzilla.date = i.submitted_on
            db_ilog_bugzilla.assigned_to = i.assigned_to

            ib = self.store.find(DBBugzillaIssueExt, \
                                 DBBugzillaIssueExt.issue_id == db_ilog_bugzilla.issue_id).one()

            ####
            db_ilog_bugzilla.alias = ib.alias
            db_ilog_bugzilla.delta_ts = ib.delta_ts
            db_ilog_bugzilla.reporter_accessible = ib.reporter_accessible
            db_ilog_bugzilla.cclist_accessible = ib.cclist_accessible
            db_ilog_bugzilla.classification_id = ib.classification_id
            db_ilog_bugzilla.classification = ib.classification
            db_ilog_bugzilla.product = ib.product
            db_ilog_bugzilla.component = ib.component
            db_ilog_bugzilla.version = ib.version
            db_ilog_bugzilla.rep_platform = ib.rep_platform
            db_ilog_bugzilla.op_sys = ib.op_sys
            db_ilog_bugzilla.dup_id = ib.dup_id
            db_ilog_bugzilla.bug_file_loc = ib.bug_file_loc
            db_ilog_bugzilla.status_whiteboard = ib.status_whiteboard
            db_ilog_bugzilla.target_milestone = ib.target_milestone
            db_ilog_bugzilla.votes = ib.votes
            db_ilog_bugzilla.everconfirmed = ib.everconfirmed
            db_ilog_bugzilla.qa_contact = ib.qa_contact
            db_ilog_bugzilla.estimated_time = ib.estimated_time
            db_ilog_bugzilla.remaining_time = ib.remaining_time
            db_ilog_bugzilla.actual_time = ib.actual_time
            db_ilog_bugzilla.deadline = ib.deadline
            db_ilog_bugzilla.keywords = ib.keywords
            db_ilog_bugzilla.cc = ib.cc
            db_ilog_bugzilla.group_bugzilla = ib.group_bugzilla
            db_ilog_bugzilla.flag = ib.flag
            db_ilog = db_ilog_bugzilla

        elif self.backend_is_jira():
            db_ilog = DBJiraIssuesLog(i.issue, i.tracker_id)
            db_ilog.issue_id = i.id
            db_ilog.type = i.type
            db_ilog.summary = i.summary
            db_ilog.description = i.description
            db_ilog.status = i.status
            db_ilog.resolution = i.resolution
            db_ilog.priority = i.priority
            db_ilog.submitted_by = i.submitted_by
            db_ilog.date = i.submitted_on
            db_ilog.assigned_to = i.assigned_to

            ib = self.store.find(DBJiraIssueExt, \
                                 DBJiraIssueExt.issue_id == db_ilog.issue_id).one()

            db_ilog.issue_key = ib.issue_key
            db_ilog.link = ib.link
            db_ilog.environment = ib.environment
            db_ilog.security = ib.security
            db_ilog.updated = ib.updated
            db_ilog.version = ib.version
            db_ilog.component = ib.component
            db_ilog.votes = ib.votes
            db_ilog.project = ib.project
            db_ilog.project_id = ib.project_id
            db_ilog.project_key = ib.project_key

        return db_ilog

    def insert_new_bugs_created(self, date_from, date_to):
        """
        This method inserts an entry with the data each issue had at creation time
        """
        if (not date_from) and (not date_to):
            issues = self.store.find(DBIssue)
        elif not date_from:
            issues = self.store.find(DBIssue, DBIssue.submitted_on < date_to)
        elif not date_to:
            issues = self.store.find(DBIssue, DBIssue.submitted_on > date_from)
        else:
            issues = self.store.find(
                DBIssue,
                And(DBIssue.submitted_on <= date_to,
                    DBIssue.submitted_on > date_from))

        issues = issues.order_by(Asc(DBIssue.submitted_on))
        ## we store the initial data for each bug found
        for i in issues:
            db_ilog = self.get_last_values(i)  # from issues and change tables
            db_ilog = self.build_initial_state(db_ilog)
            self.store.add(db_ilog)
            printdbg(
                "Issue #%s created at %s - date_from = %s - date_to = %s" %
                (db_ilog.issue, db_ilog.date, date_from, date_to))

    def get_tracker_id(self, issue_id):
        """
        Returns tracker id from issues
        """
        result = self.store.find(DBIssue.tracker_id,
                                 DBIssue.id == issue_id).one()
        return result

    def run(self):

        last_change_date = self.get_last_change_date()
        printdbg("Last change logged at %s" % (last_change_date))

        date_from = None
        date_to = None

        if last_change_date:
            changes = self.store.find(DBChange,
                                      DBChange.changed_on > last_change_date)
            date_from = last_change_date
        else:
            changes = self.store.find(DBChange)

        changes = changes.order_by(Asc(DBChange.changed_on))

        for ch in changes:
            # insert creation if needed
            date_to = ch.changed_on
            self.insert_new_bugs_created(date_from, date_to)
            date_from = date_to

            field = ch.field
            new_value = ch.new_value
            changed_by = ch.changed_by
            date = ch.changed_on
            issue_id = ch.issue_id

            #print("field = %s, new_value = %s, changed_by = %s, date = %s"
            #      % (field, new_value, str(changed_by), str(date)))

            db_ilog = self.get_previous_state(issue_id)

            printdbg("Issue #%s modified at %s" % (db_ilog.issue, date))

            if self.backend_is_bugzilla():
                # Bugzilla section
                #
                #
                if (field in bg_issues_links):
                    table_field = bg_issues_links[field]
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    if table_field == 'summary':
                        db_ilog.summary = new_value
                    elif table_field == 'priority':
                        db_ilog.priority = new_value
                    elif table_field == 'type':
                        db_ilog.type = new_value
                    elif table_field == 'assigned_to':
                        db_ilog.assigned_to = self.get_people_id(
                            new_value, self.get_tracker_id(db_ilog.issue_id))
                    elif table_field == 'status':
                        db_ilog.status = new_value
                    elif table_field == 'resolution':
                        db_ilog.resolution = new_value
                    elif table_field == 'alias':
                        db_ilog.alias = new_value
                    elif table_field == 'reporter_accessible':
                        db_ilog.reporter_accessible = new_value
                    elif table_field == 'cclist_accessible':
                        db_ilog.cclist_accessible = new_value
                    elif table_field == 'product':
                        db_ilog.product = new_value
                    elif table_field == 'component':
                        db_ilog.component = new_value
                    elif table_field == 'version':
                        db_ilog.version = new_value
                    elif table_field == 'rep_platform':
                        db_ilog.rep_platform = new_value
                    elif table_field == 'op_sys':
                        db_ilog.op_sys = new_value
                    elif table_field == 'bug_file_loc':
                        db_ilog.bug_file_loc = new_value
                    elif table_field == 'status_whiteboard':
                        db_ilog.status_whiteboard = new_value
                    elif table_field == 'target_milestone':
                        db_ilog.target_milestone = new_value
                    elif table_field == 'votes':
                        db_ilog.votes = new_value
                    elif table_field == 'everconfirmed':
                        db_ilog.everconfirmed = new_value
                    elif table_field == 'qa_contact':
                        db_ilog.qa_contact = new_value
                    elif table_field == 'keywords':
                        db_ilog.keywords = new_value
                    elif table_field == 'cc':
                        db_ilog.cc = new_value

                    try:
                        self.store.add(db_ilog)
                    except:
                        traceback.print_exc()

            elif self.backend_is_jira():
                # Jira section
                #
                #

                if (field in jira_issues_links):
                    table_field = jira_issues_links[field]
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    if table_field == 'summary':
                        db_ilog.summary = new_value
                    elif table_field == 'priority':
                        db_ilog.priority = new_value
                    elif table_field == 'type':
                        db_ilog.type = new_value
                    elif table_field == 'assigned_to':
                        db_ilog.assigned_to = self.get_people_id(
                            new_value, self.get_tracker_id(db_ilog.issue_id))
                    elif table_field == 'status':
                        db_ilog.status = new_value
                    elif table_field == 'resolution':
                        db_ilog.resolution = new_value
                    elif table_field == 'description':
                        db_ilog.description = new_value
                    elif table_field == 'link':
                        db_ilog.link = new_value
                    elif table_field == 'component':
                        db_ilog.component = new_value
                    elif table_field == 'version':
                        db_ilog.version = new_value
                    elif table_field == 'security':
                        db_ilog.security = new_value
                    try:
                        self.store.add(db_ilog)
                    except:
                        traceback.print_exc()

        # insert the bugs created after the last change; if there were no
        # changes at all, this inserts every created bug
        self.insert_new_bugs_created(date_from, None)
        self.store.commit()
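
A short, hedged sketch of how this class appears intended to be used; 'bg' is the backend label that backend_is_bugzilla() checks for.

# Hypothetical entry point: rebuild the issues log for a Bugzilla backend.
ilog = IssuesLog('bg')  # connects to MySQL and creates the log table
ilog.run()              # replays issue creations and changes into the log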
Exemplo n.º 40
0
class IssuesLog:
    def __init__(self, backend_name):
        self.backend_name = backend_name
        self.connect()
        self.create_db()

    def connect(self):
        opts = Config()

        self.database = create_database(
            "mysql://"
            + opts.db_user_out
            + ":"
            + opts.db_password_out
            + "@"
            + opts.db_hostname_out
            + ":"
            + opts.db_port_out
            + "/"
            + opts.db_database_out
        )
        self.store = Store(self.database)

    def create_db(self):
        self.store.execute(__sql_table__)

    def copy_issue(self, db_ilog):
        """
        This method creates a copy of a DBIssuesLog object
        """
        aux = DBIssuesLog(db_ilog.issue, db_ilog.tracker_id)
        aux.issue_id = db_ilog.issue_id
        aux.type = db_ilog.type
        aux.summary = db_ilog.summary
        aux.description = db_ilog.description
        aux.status = db_ilog.status
        aux.resolution = db_ilog.resolution
        aux.priority = db_ilog.priority
        aux.submitted_by = db_ilog.submitted_by
        aux.date = db_ilog.date
        aux.assigned_to = db_ilog.assigned_to
        return aux

    def build_initial_state(self, db_ilog):
        """
        This method gets the first changes of every field in
        order to get the initial state of the bug
        """
        fields = self.store.execute("SELECT DISTINCT(field) FROM changes where issue_id=%s" % (db_ilog.issue_id))

        for f in fields:
            value = self.store.execute(
                'SELECT old_value FROM changes WHERE issue_id=%s AND field="%s" ORDER BY changed_on LIMIT 1'
                % (db_ilog.issue_id, f[0])
            )
            for v in value:
                # Bugzilla section
                #
                if f[0] in bg_issues_links:
                    table_field = bg_issues_links[f[0]]
                    if table_field == "summary":
                        db_ilog.summary = v[0]
                    elif table_field == "priority":
                        db_ilog.priority = v[0]
                    elif table_field == "assigned_to":
                        db_ilog.assigned_to = v[0]
                    elif table_field == "status":
                        db_ilog.status = v[0]
                    elif table_field == "resolution":
                        db_ilog.resolution = v[0]
        return db_ilog

    def run(self):
        issues = self.store.find(DBIssue)
        for i in issues:
            db_ilog = DBIssuesLog(i.issue, i.tracker_id)
            db_ilog.issue_id = i.id
            db_ilog.type = i.type
            db_ilog.summary = i.summary
            db_ilog.description = i.description
            db_ilog.status = i.status
            db_ilog.resolution = i.resolution
            db_ilog.priority = i.priority
            db_ilog.submitted_by = i.submitted_by
            db_ilog.date = i.submitted_on
            db_ilog.assigned_to = i.assigned_to

            db_ilog = self.build_initial_state(db_ilog)

            self.store.add(db_ilog)

            # the code below gets all the changes and inserts a row per change
            changes = self.store.execute(
                "SELECT field, new_value, changed_by, changed_on FROM changes where issue_id=%s" % (db_ilog.issue_id)
            )

            for ch in changes:
                field = ch[0]
                new_value = ch[1]
                changed_by = ch[2]
                date = ch[3]

                db_ilog = self.copy_issue(db_ilog)

                # Bugzilla section
                #
                if field in bg_issues_links:
                    table_field = bg_issues_links[field]
                    if table_field == "summary":
                        db_ilog.summary = new_value
                    elif table_field == "priority":
                        db_ilog.priority = new_value
                    elif table_field == "assignted_to":
                        db_ilog.assigned_to = new_value
                    elif table_field == "status":
                        db_ilog.status = new_value
                    elif table_field == "resolution":
                        db_ilog.resolution = new_value
                    db_ilog.submitted_by = changed_by
                    db_ilog.date = date

                    try:
                        self.store.add(db_ilog)
                    except:
                        traceback.print_exc()

            self.store.commit()
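
Both IssuesLog variants rely on a module-level bg_issues_links mapping from change-log field names to log-table columns. The following is a hypothetical sketch of its shape; the real keys depend on the tracker's field names.

# Hypothetical shape of the mapping consulted by build_initial_state()
# and run(); keys are field names as recorded in the changes table.
bg_issues_links = {
    'Summary': 'summary',
    'Priority': 'priority',
    'Assignee': 'assigned_to',
    'Status': 'status',
    'Resolution': 'resolution',
}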
Exemplo n.º 41
0
class TableReplacer(object):
    """
    This is the base class used by every Updater
    """
    def __init__(self, old_db_file, new_db_file, start_ver):
        from globaleaks.db.update_8_9 import Context_v_8, Receiver_v_8, Notification_v_8
        from globaleaks.db.update_9_10 import Node_v_9, Receiver_v_9, User_v_9
        from globaleaks.db.update_10_11 import InternalTip_v_10, InternalFile_v_10
        from globaleaks.db.update_11_12 import Node_v_11, Context_v_11
        from globaleaks.db.update_12_13 import Node_v_12, Context_v_12
        from globaleaks.db.update_13_14 import Node_v_13, Context_v_13
        from globaleaks.db.update_14_15 import Node_v_14, User_v_14, Context_v_14, Receiver_v_14, \
            InternalTip_v_14, Notification_v_14, Stats_v_14, Comment_v_14
        from globaleaks.db.update_15_16 import Receiver_v_15, Notification_v_15
        from globaleaks.db.update_16_17 import Node_v_16, Receiver_v_16, Notification_v_16, Stats_v_16
        from globaleaks.db.update_17_18 import Node_v_17
        from globaleaks.db.update_18_19 import Node_v_18

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        self.table_history = {
            'Node': [
                Node_v_9, None, Node_v_11, None, Node_v_12, Node_v_13,
                Node_v_14, Node_v_16, None, Node_v_17, Node_v_18, models.Node
            ],
            'User': [
                User_v_9, None, User_v_14, None, None, None, None, models.User,
                None, None, None, None
            ],
            'Context': [
                Context_v_8, Context_v_11, None, None, Context_v_12,
                Context_v_13, Context_v_14, models.Context, None, None, None,
                None
            ],
            'Receiver': [
                Receiver_v_8, Receiver_v_9, Receiver_v_14, None, None, None,
                None, Receiver_v_15, Receiver_v_16, models.Receiver, None, None
            ],
            'ReceiverFile': [
                models.ReceiverFile, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'Notification': [
                Notification_v_8, Notification_v_14, None, None, None, None,
                None, Notification_v_15, Notification_v_16,
                models.Notification, None, None
            ],
            'Comment': [
                Comment_v_14, None, None, None, None, None, None,
                models.Comment, None, None, None, None
            ],
            'InternalTip': [
                InternalTip_v_10, None, None, InternalTip_v_14, None, None,
                None, models.InternalTip, None, None, None, None
            ],
            'InternalFile': [
                InternalFile_v_10, None, None, models.InternalFile, None, None,
                None, None, None, None, None, None
            ],
            'WhistleblowerTip': [
                models.WhistleblowerTip, None, None, None, None, None, None,
                None, None, None, None, None
            ],
            'ReceiverTip': [
                models.ReceiverTip, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'ReceiverInternalTip': [
                models.ReceiverInternalTip, None, None, None, None, None, None,
                None, None, None, None, None
            ],
            'ReceiverContext': [
                models.ReceiverContext, None, None, None, None, None, None,
                None, None, None, None, None
            ],
            'Message': [
                models.Message, None, None, None, None, None, None, None, None,
                None, None, None
            ],
            'Stats': [
                Stats_v_14, None, None, None, None, None, None, Stats_v_16,
                None, models.Stats, None, None
            ],
            'ApplicationData': [
                models.ApplicationData, None, None, None, None, None, None,
                None, None, None, None, None
            ],
            'Field': [
                models.Field, None, None, None, None, None, None, None, None,
                None, None, None
            ],
            'FieldOption': [
                models.FieldOption, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'FieldField': [
                models.FieldField, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'Step': [
                models.Step, None, None, None, None, None, None, None, None,
                None, None, None
            ],
            'StepField': [
                models.StepField, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'Anomalies': [
                models.Anomalies, None, None, None, None, None, None, None,
                None, None, None, None
            ],
            'EventLogs': [
                models.EventLogs, None, None, None, None, None, None, None,
                None, None, None, None
            ],
        }

        for k, v in self.table_history.iteritems():
            # +1 because the count starts from 0,
            # -8 because releases before the 8th are not supported anymore
            length = DATABASE_VERSION + 1 - 8
            if len(v) != length:
                msg = 'Expecting a table with {} statuses ({})'.format(
                    length, k)
                raise TypeError(msg)

        log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
        old_database = create_database('sqlite:' + self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database('sqlite:' + new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:
            log.msg('{} Acquire SQL schema {}'.format(
                self.debug_info, GLSetting.db_schema_file))

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                log.msg('Unable to access', GLSetting.db_schema_file)
                raise IOError('Unable to access db schema file')

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f).split(';')
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        log.msg(
                            'OperationalError in "{}"'.format(create_query))
            self.store_new.commit()
            return
            # stop here: the latest schema was just created in full, so no
            # per-table migration is needed

        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in the version
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                log.msg('{} OperationalError in [{}]'.format(
                    self.debug_info, create_query))
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        pass

    def epilogue(self):
        pass

    def get_right_model(self, table_name, version):

        table_index = (version - 8)

        if table_name not in self.table_history:
            msg = 'Not implemented usage of get_right_model {} ({} {})'.format(
                __file__, table_name, self.start_ver)
            raise NotImplementedError(msg)

        if version > DATABASE_VERSION:
            raise ValueError(
                'Version supplied must be less or equal to {}'.format(
                    DATABASE_VERSION))

        if self.table_history[table_name][table_index]:
            return self.table_history[table_name][table_index]

        # otherwise it is None, so fall back to the most recent valid version
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # This should never happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name: the name of the model/table to look up
        @param version: the database version of interest
        @return:
            The CREATE query for the model at that version, or None if
            the table does not exist in that version.
        """

        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):
        models_count = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)).count()
        log.msg('{} default {} migration assistant: #{}'.format(
            self.debug_info, table_name, models_count))

        old_objects = self.store_old.find(
            self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # copy each column across using Storm's internal column map
            for _, v in new_obj._storm_columns.iteritems():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        log.msg('{} default {} migration assistant'.format(
            self.debug_info, table_name))

        old_obj = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # copy each column across using Storm's internal column map
        for _, v in new_obj._storm_columns.iteritems():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        Message was created between versions 7 and 8.
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        Stats was created between versions 14 and 15 and has not been
        migrated since version 17.
        """
        if self.start_ver < 17:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        There is no need to migrate the application data.
        Default application data is loaded by the application
        and stored in the db at each new start.
        """
        return

    def migrate_Field(self):
        """
        Field was created between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Field")

    def migrate_FieldOption(self):
        """
        FieldOption was created between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldOption")

    def migrate_FieldField(self):
        """
        FieldField was created between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("FieldField")

    def migrate_Step(self):
        """
        Step was created between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Step")

    def migrate_StepField(self):
        """
        StepField was created between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("StepField")

    def migrate_Anomalies(self):
        """
        Anomalies was created between versions 14 and 15.
        """
        if self.start_ver < 15:
            return

        self._perform_copy_list("Anomalies")

    def migrate_EventLogs(self):
        """
        EventLogs was created between versions 15 and 16.
        """
        if self.start_ver < 16:
            return

        self._perform_copy_list("EventLogs")
Exemplo n.º 42
0
class SchemaTest(MakePackage, MakePath):
    """Test the Storm Schema Create, Delete, and Drop and upgrade"""
    def setUp(self):
        super(SchemaTest, self).setUp()
        sqlite_path = self.make_path("")
        self.database = create_database("sqlite:///%s" % sqlite_path)
        self.store = Store(self.database)
        self.patch_table = "my_patch_table"

        self.package = self.create_package(self.make_path(), "patch_package")
        # patch_package is created during the tests and is not around during
        # lint checks, so we'll disable the error
        import patch_package

        creates = ["CREATE TABLE person (id INTEGER, name TEXT)"]
        drops = ["DROP TABLE person"]
        deletes = ["DELETE FROM person"]

        self.schema = Schema(creates, drops, deletes, patch_package,
                             self.patch_table)

    def test_create(self):
        """Create a Schema"""
        self.assertRaises(StormError,
                          self.store.execute, "SELECT * FROM person")
        self.schema.create(self.store)
        self.assertEqual(list(self.store.execute("SELECT * FROM person")), [])

    def test_drop(self):
        """Drop a Schema"""
        self.schema.create(self.store)
        self.assertEqual(list(self.store.execute("SELECT * FROM person")), [])
        self.schema.drop(self.store)
        self.assertRaises(StormError,
                          self.store.execute, "SELECT * FROM person")

    def test_delete(self):
        """Delete a Schema"""
        self.schema.create(self.store)
        self.store.execute("INSERT INTO person (id, name) VALUES (1, 'Jane')")
        self.assertEqual(
            list(self.store.execute("SELECT * FROM person")), [(1, u"Jane")])
        self.schema.delete(self.store)
        self.assertEqual(list(self.store.execute("SELECT * FROM person")), [])

    def test_upgrade_creates_schema(self):
        """Upgrade a Schema, aka apply all patches"""
        self.assertRaises(StormError,
                          self.store.execute, "SELECT * FROM person")
        self.schema.upgrade(self.store)
        self.assertEqual(list(self.store.execute("SELECT * FROM person")), [])

    def test_upgrade_marks_patches_applied(self):
        """Test that an upgrade updates the patch table"""
        contents = """
def apply(store):
    store.execute('ALTER TABLE person ADD COLUMN phone TEXT')
"""
        self.package.create_module("patch_1.py", contents)
        self.assertRaises(StormError, self.store.execute,
                          "SELECT * FROM %s" % self.patch_table)
        self.schema.upgrade(self.store)
        self.assertEqual(
            list(self.store.execute("SELECT * FROM %s" % self.patch_table)),
            [(1,)])

    def test_upgrade_applies_patches(self):
        """Test that an upgrade actually applies the patches"""
        self.schema.create(self.store)
        contents = """
def apply(store):
    store.execute('ALTER TABLE person ADD COLUMN phone TEXT')
"""
        self.package.create_module("patch_1.py", contents)
        self.schema.upgrade(self.store)
        self.store.execute(
            "INSERT INTO person (id, name, phone) VALUES (1, 'Jane', '123')")
        self.assertEqual(
            list(self.store.execute("SELECT * FROM person")),
            [(1, u"Jane", u"123")])
Exemplo n.º 43
0
class TableReplacer:
    """
    This is the base class used by every Updater
    """
    def __init__(self, old_db_file, new_db_file, start_ver):

        from globaleaks.db.update_5_6 import User_version_5, Comment_version_5, Node_version_5
        from globaleaks.db.update_6_7 import Node_version_6, Context_version_6
        from globaleaks.db.update_7_8 import Node_version_7, Notification_version_7, Context_version_7, \
            Receiver_version_7, InternalFile_version_7
        from globaleaks.db.update_8_9 import Context_version_8, Receiver_version_8, Notification_version_8
        from globaleaks.db.update_9_10 import Node_version_9, ApplicationData_version_10, \
            Receiver_version_9, User_version_9
        from globaleaks.db.update_10_11 import InternalTip_version_10, InternalFile_version_10
        from globaleaks.db.update_11_12 import Node_version_11, ApplicationData_version_11, Context_version_11

        self.old_db_file = old_db_file
        self.new_db_file = new_db_file
        self.start_ver = start_ver

        self.std_fancy = " ł "
        self.debug_info = "   [%d => %d] " % (start_ver, start_ver + 1)

        self.table_history = {
            'Node': [
                Node_version_5, Node_version_6, Node_version_7, Node_version_9,
                None, Node_version_11, None, models.Node
            ],
            'User': [
                User_version_5, User_version_9, None, None, None, models.User,
                None, None
            ],
            'Context': [
                Context_version_6, None, Context_version_7, Context_version_8,
                Context_version_11, None, None, models.Context
            ],
            'Receiver': [
                Receiver_version_7, None, None, Receiver_version_8,
                Receiver_version_9, models.Receiver, None, None
            ],
            'ReceiverFile':
            [models.ReceiverFile, None, None, None, None, None, None, None],
            'Notification': [
                Notification_version_7, None, None, Notification_version_8,
                models.Notification, None, None, None
            ],
            'Comment': [
                Comment_version_5, models.Comment, None, None, None, None,
                None, None
            ],
            'InternalTip': [
                InternalTip_version_10, None, None, None, None, None,
                models.InternalTip, None
            ],
            'InternalFile': [
                InternalFile_version_7, None, None, InternalFile_version_10,
                None, None, models.InternalFile, None
            ],
            'WhistleblowerTip': [
                models.WhistleblowerTip, None, None, None, None, None, None,
                None
            ],
            'ReceiverTip':
            [models.ReceiverTip, None, None, None, None, None, None, None],
            'ReceiverInternalTip': [
                models.ReceiverInternalTip, None, None, None, None, None, None,
                None
            ],
            'ReceiverContext':
            [models.ReceiverContext, None, None, None, None, None, None, None],
            'Message':
            [models.Message, None, None, None, None, None, None, None],
            'Stats': [models.Stats, None, None, None, None, None, None, None],
            'ApplicationData': [
                ApplicationData_version_10, None, None, None, None, None, None,
                models.ApplicationData
            ],
        }
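        # each list holds one slot per supported version (5 .. DATABASE_VERSION);
        # a None entry means "unchanged since the previous version", so lookups
        # fall back to the most recent non-None model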

        for k, v in self.table_history.iteritems():
            # +1 because counting starts from 0,
            # -5 because releases 0, 1, 2, 3 and 4 are not supported anymore
            assert len(v) == (DATABASE_VERSION + 1 - 5), \
                "I'm expecting %s to have %d model versions" % (
                    k, DATABASE_VERSION + 1 - 5)

        print "%s Opening old DB: %s" % (self.debug_info, old_db_file)
        old_database = create_database("sqlite:%s" % self.old_db_file)
        self.store_old = Store(old_database)

        GLSetting.db_file = new_db_file

        new_database = create_database("sqlite:%s" % new_db_file)
        self.store_new = Store(new_database)

        if self.start_ver + 1 == DATABASE_VERSION:

            print "%s Acquire SQL schema %s" % (self.debug_info,
                                                GLSetting.db_schema_file)

            if not os.access(GLSetting.db_schema_file, os.R_OK):
                print "Unable to access %s" % GLSetting.db_schema_file
                raise Exception("Unable to access db schema file")

            with open(GLSetting.db_schema_file) as f:
                create_queries = ''.join(f.readlines()).split(';')
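                # NOTE: splitting on ';' assumes no statement embeds a literal
                # semicolon, which holds for a plain CREATE TABLE schema file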
                for create_query in create_queries:
                    try:
                        self.store_new.execute(create_query + ';')
                    except OperationalError:
                        print "OperationalError in [%s]" % create_query

            self.store_new.commit()
            return

        # past this point we handle the intermediate (migrating) versions:
        # create each table with the model matching the target version
        for k, v in self.table_history.iteritems():

            create_query = self.get_right_sql_version(k, self.start_ver + 1)
            if not create_query:
                # table not present in this version of the schema
                continue

            try:
                self.store_new.execute(create_query + ';')
            except OperationalError as excep:
                print "%s OperationalError in [%s]" % (self.debug_info,
                                                       create_query)
                raise excep

        self.store_new.commit()

    def close(self):
        self.store_old.close()
        self.store_new.close()

    def initialize(self):
        pass

    def epilogue(self):
        pass

    def get_right_model(self, table_name, version):

        table_index = (version - 5)
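        # versions 0-4 are no longer supported, so index 0 of every
        # table_history list corresponds to the model at version 5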

        if not self.table_history.has_key(table_name):
            print "get_right_model in %s: unsupported table %s (start_ver %d)" % (
                __file__, table_name, self.start_ver)
            raise NotImplementedError

        assert version <= DATABASE_VERSION, "wrong developer brainsync"

        if self.table_history[table_name][table_index]:
            return self.table_history[table_name][table_index]

        # else it's None: walk back to the most recent version in which
        # the model actually changed
        while table_index >= 0:
            if self.table_history[table_name][table_index]:
                return self.table_history[table_name][table_index]
            table_index -= 1

        # this should never happen
        return None

    def get_right_sql_version(self, model_name, version):
        """
        @param model_name:
        @param version:
        @return:
            The SQL right for the stuff we've
        """

        modelobj = self.get_right_model(model_name, version)
        if not modelobj:
            return None

        right_query = generateCreateQuery(modelobj)
        return right_query

    def _perform_copy_list(self, table_name):

        print "%s default %s migration assistant: #%d" % (
            self.debug_info, table_name,
            self.store_old.find(
                self.get_right_model(table_name, self.start_ver)).count())

        old_objects = self.store_old.find(
            self.get_right_model(table_name, self.start_ver))

        for old_obj in old_objects:
            new_obj = self.get_right_model(table_name, self.start_ver + 1)()

            # copy every column value across, using Storm's internal
            # mapping between Column objects and their attribute names
            for v in new_obj._storm_columns.itervalues():
                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)

        self.store_new.commit()

    def _perform_copy_single(self, table_name):
        print "%s default %s migration assistant" % (self.debug_info,
                                                     table_name)

        old_obj = self.store_old.find(
            self.get_right_model(table_name, self.start_ver)).one()
        new_obj = self.get_right_model(table_name, self.start_ver + 1)()

        # copy every column value across, using Storm's internal
        # mapping between Column objects and their attribute names
        for v in new_obj._storm_columns.itervalues():
            setattr(new_obj, v.name, getattr(old_obj, v.name))

        self.store_new.add(new_obj)
        self.store_new.commit()

    def migrate_Context(self):
        self._perform_copy_list("Context")

    def migrate_Node(self):
        self._perform_copy_single("Node")

    def migrate_User(self):
        self._perform_copy_list("User")

    def migrate_ReceiverTip(self):
        self._perform_copy_list("ReceiverTip")

    def migrate_WhistleblowerTip(self):
        self._perform_copy_list("WhistleblowerTip")

    def migrate_Comment(self):
        self._perform_copy_list("Comment")

    def migrate_InternalTip(self):
        self._perform_copy_list("InternalTip")

    def migrate_Receiver(self):
        self._perform_copy_list("Receiver")

    def migrate_InternalFile(self):
        self._perform_copy_list("InternalFile")

    def migrate_ReceiverFile(self):
        self._perform_copy_list("ReceiverFile")

    def migrate_Notification(self):
        self._perform_copy_single("Notification")

    def migrate_ReceiverContext(self):
        self._perform_copy_list("ReceiverContext")

    def migrate_ReceiverInternalTip(self):
        self._perform_copy_list("ReceiverInternalTip")

    def migrate_Message(self):
        """
        Message has been created between versions 7 and 8!
        """
        if self.start_ver < 8:
            return

        self._perform_copy_list("Message")

    def migrate_Stats(self):
        """
        Stats has been created between versions 9 and 10!
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("Stats")

    def migrate_ApplicationData(self):
        """
        ApplicationData has been created between versions 9 and 10!
        """
        if self.start_ver < 10:
            return

        self._perform_copy_list("ApplicationData")