Example #1
def load_db(self, uri):
    # Assumes module-level globals `database` and `tlocal` defined elsewhere.
    global database
    global tlocal
    import threading
    tlocal = threading.local()
    # First use: create the database and bind a thread-local store to it.
    if not database:
        database = create_database(uri)
        tlocal.store = Store(database)
    # Reopen after an explicit close.
    if self.isClosed:
        database = create_database(uri)
        tlocal.store = Store(database)
        self.isClosed = False
    return tlocal.store
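For orientation, a minimal self-contained sketch of the pattern every example on this page builds on: create_database() turns a URI into a Database object, and a Store wraps a connection to it for executing statements and managing the transaction. The table and values below are illustrative only; the bare "sqlite:" URI selects an in-memory SQLite database (see the test_wb_create_database examples further down).

from storm.locals import create_database, Store

# "sqlite:" with no path is an in-memory SQLite database.
database = create_database("sqlite:")
store = Store(database)

# Raw SQL goes through Store.execute(); results expose get_one()/get_all().
store.execute("CREATE TABLE test (id INTEGER PRIMARY KEY, title TEXT)")
store.execute("INSERT INTO test VALUES (1, 'hello')")
store.commit()

result = store.execute("SELECT title FROM test WHERE id = 1")
print(result.get_one())  # one matching row, returned as a tuple

store.close()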
Example #2
    def test_recover_after_timeout(self):
        """Regression test for recovering from database locked exception.
        
        In 0.10, connection.commit() would forget that a transaction was in
        progress if an exception was raised, such as an OperationalError due to
        another connection being open.  As a result, a subsequent modification
        to the database would cause BEGIN to be issued to the database, which
        would complain that a transaction was already in progress.

        """
        # Create a database with a table.
        database = create_database("sqlite:%s?timeout=0.3" % self.get_path())
        connection1 = database.connect()
        connection1.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)")
        connection1.commit()

        # Put some data in, but also make a second connection to the database,
        # which will prevent a commit until it is closed.
        connection1.execute("INSERT INTO test VALUES (1)")
        connection2 = database.connect()
        connection2.execute("SELECT id FROM test")
        self.assertRaises(OperationalError, connection1.commit)

        # Close the second connection - it should now be possible to commit.
        connection2.close()

        # In 0.10, the next statement raised OperationalError: cannot start a
        # transaction within a transaction
        connection1.execute("INSERT INTO test VALUES (2)")
        connection1.commit()

        # Check that the correct data is present
        self.assertEquals(connection1.execute("SELECT id FROM test").get_all(),
                          [(1,), (2,)])
Example #3
 def setUp(self):
     super(PostgresTimeoutTracerTest, self).setUp()
     self.database = create_database(os.environ["STORM_POSTGRES_URI"])
     self.connection = self.database.connect()
     install_tracer(self.tracer)
     self.tracer.get_remaining_time = lambda: self.remaining_time
     self.remaining_time = 10.5
Example #4
 def setUp(self):
     super(PostgresTimeoutTracerTest, self).setUp()
     self.database = create_database(os.environ["STORM_POSTGRES_URI"])
     self.connection = self.database.connect()
     install_tracer(self.tracer)
     self.tracer.get_remaining_time = lambda: self.remaining_time
     self.remaining_time = 10.5
Example #5
    def _wrap(self, function, *args, **kwargs):
        """
        Wrap provided function calling it inside a thread and
        passing the store to it.
        """
        with transact_lock:
            start_time = datetime.now()
            store = Store(create_database(GLSettings.db_uri))

            try:
                if self.instance:
                    result = function(self.instance, store, *args, **kwargs)
                else:
                    result = function(store, *args, **kwargs)

                store.commit()
            except:
                store.rollback()
                raise
            else:
                return result
            finally:
                store.reset()
                store.close()

                duration = timedelta_to_milliseconds(datetime.now() -
                                                     start_time)
                msg = "Query [%s] executed in %.1fms" % (self.method.__name__,
                                                         duration)
                if duration > self.timelimit:
                    log.err(msg)
                    schedule_exception_email(msg)
                else:
                    log.debug(msg)
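The _wrap() method above belongs to a decorator class; as a hedged sketch of how a decorated function might look (the decorator name transact and the users table are assumptions, not shown in the snippet), the wrapped function simply receives the store as its first argument and returns a plain value:

# Sketch only: assumes the class above is exposed as a decorator named `transact`
# and that a `users` table exists.
@transact
def count_users(store):
    # The wrapper supplies the Store, commits on success and rolls back on error.
    return store.execute("SELECT COUNT(*) FROM users").get_one()[0]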
Example #6
    def test_commit_timeout(self):
        """Regression test for commit observing the timeout.
        
        In 0.10, the timeout wasn't observed for connection.commit().

        """
        # Create a database with a table.
        database = create_database("sqlite:%s?timeout=0.3" % self.get_path())
        connection1 = database.connect()
        connection1.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)")
        connection1.commit()

        # Put some data in, but also make a second connection to the database,
        # which will prevent a commit until it is closed.
        connection1.execute("INSERT INTO test VALUES (1)")
        connection2 = database.connect()
        connection2.execute("SELECT id FROM test")

        started = time.time()
        try:
            connection1.commit()
        except OperationalError as exception:
            self.assertEqual(str(exception), "database is locked")
            # In 0.10, the next assertion failed because the timeout wasn't
            # enforced for the "COMMIT" statement.
            self.assertTrue(time.time() - started >= 0.3)
        else:
            self.fail("OperationalError not raised")
Example #7
 def test_charset_option(self):
     uri = URI(os.environ["STORM_MYSQL_URI"])
     uri.options["charset"] = "ascii"
     database = create_database(uri)
     connection = database.connect()
     result = connection.execute("SELECT @@character_set_client")
     self.assertEquals(result.get_one(), ("ascii",))
Example #8
    def test_recover_after_timeout(self):
        """Regression test for recovering from database locked exception.
        
        In 0.10, connection.commit() would forget that a transaction was in
        progress if an exception was raised, such as an OperationalError due to
        another connection being open.  As a result, a subsequent modification
        to the database would cause BEGIN to be issued to the database, which
        would complain that a transaction was already in progress.

        """
        # Create a database with a table.
        database = create_database("sqlite:%s?timeout=0.3" % self.get_path())
        connection1 = database.connect()
        connection1.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)")
        connection1.commit()

        # Put some data in, but also make a second connection to the database,
        # which will prevent a commit until it is closed.
        connection1.execute("INSERT INTO test VALUES (1)")
        connection2 = database.connect()
        connection2.execute("SELECT id FROM test")
        self.assertRaises(OperationalError, connection1.commit)

        # Close the second connection - it should now be possible to commit.
        connection2.close()

        # In 0.10, the next statement raised OperationalError: cannot start a
        # transaction within a transaction
        connection1.execute("INSERT INTO test VALUES (2)")
        connection1.commit()

        # Check that the correct data is present
        self.assertEqual(
            connection1.execute("SELECT id FROM test").get_all(), [(1, ),
                                                                   (2, )])
Example #9
    def test_commit_timeout(self):
        """Regression test for commit observing the timeout.
        
        In 0.10, the timeout wasn't observed for connection.commit().

        """
        # Create a database with a table.
        database = create_database("sqlite:%s?timeout=0.3" % self.get_path())
        connection1 = database.connect()
        connection1.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)")
        connection1.commit()

        # Put some data in, but also make a second connection to the database,
        # which will prevent a commit until it is closed.
        connection1.execute("INSERT INTO test VALUES (1)")
        connection2 = database.connect()
        connection2.execute("SELECT id FROM test")

        started = time.time()
        try:
            connection1.commit()
        except OperationalError, exception:
            self.assertEquals(str(exception), "database is locked")
            # In 0.10, the next assertion failed because the timeout wasn't
            # enforced for the "COMMIT" statement.
            self.assertTrue(time.time()-started >= 0.3)
Example #10
 def test_wb_create_database(self):
     database = create_database("mysql://*****:*****@ht:12/db?unix_socket=us")
     self.assertTrue(isinstance(database, MySQL))
     for key, value in [("db", "db"), ("host", "ht"), ("port", 12),
                        ("user", "un"), ("passwd", "pw"),
                        ("unix_socket", "us")]:
         self.assertEquals(database._connect_kwargs.get(key), value)
Example #11
 def test_charset_option(self):
     uri = URI(os.environ["STORM_MYSQL_URI"])
     uri.options["charset"] = "ascii"
     database = create_database(uri)
     connection = database.connect()
     result = connection.execute("SELECT @@character_set_client")
     self.assertEquals(result.get_one(), ("ascii", ))
Example #12
 def test_wb_create_database(self):
     database = create_database("mysql://*****:*****@ht:12/db?unix_socket=us")
     self.assertTrue(isinstance(database, MySQL))
     for key, value in [("db", "db"), ("host", "ht"), ("port", 12),
                        ("user", "un"), ("passwd", "pw"),
                        ("unix_socket", "us")]:
         self.assertEquals(database._connect_kwargs.get(key), value)
Example #13
def perform_data_update(dbfile):
    store = Store(create_database(GLSettings.make_db_uri(dbfile)))

    enabled_languages = [
        lang.name for lang in store.find(l10n.EnabledLanguage)
    ]

    removed_languages = list(
        set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES))

    if len(removed_languages):
        removed_languages.sort()
        removed_languages = ', '.join(removed_languages)
        raise Exception(
            "FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
            "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop"
            % removed_languages)

    try:
        db_perform_data_update(store)
        store.commit()
    except:
        store.rollback()
        raise
    finally:
        store.close()
Example #14
    def test_isolation_read_committed(self):
        database = create_database(
            os.environ["STORM_POSTGRES_URI"] + "?isolation=read-committed")

        connection = database.connect()
        self.addCleanup(connection.close)

        result = connection.execute("SHOW TRANSACTION ISOLATION LEVEL")
        self.assertEquals(result.get_one()[0], u"read committed")

        connection.execute("INSERT INTO bin_test VALUES (1, 'foo')")

        result = self.connection.execute("SELECT id FROM bin_test")
        # Data should not be there already
        self.assertEquals(result.get_all(), [])
        connection.rollback()

        # Start a transaction
        result = connection.execute("SELECT 1")
        self.assertEquals(result.get_one(), (1,))

        self.connection.execute("INSERT INTO bin_test VALUES (1, 'foo')")
        self.connection.commit()

        result = connection.execute("SELECT id FROM bin_test")
        # Data is already here!
        self.assertEquals(result.get_one(), (1,))
        connection.rollback()
Example #15
    def test_isolation_read_committed(self):
        database = create_database(os.environ["STORM_POSTGRES_URI"] +
                                   "?isolation=read-committed")

        connection = database.connect()
        self.addCleanup(connection.close)

        result = connection.execute("SHOW TRANSACTION ISOLATION LEVEL")
        self.assertEquals(result.get_one()[0], u"read committed")

        connection.execute("INSERT INTO bin_test VALUES (1, 'foo')")

        result = self.connection.execute("SELECT id FROM bin_test")
        # Data should not be there already
        self.assertEquals(result.get_all(), [])
        connection.rollback()

        # Start a transaction
        result = connection.execute("SELECT 1")
        self.assertEquals(result.get_one(), (1, ))

        self.connection.execute("INSERT INTO bin_test VALUES (1, 'foo')")
        self.connection.commit()

        result = connection.execute("SELECT id FROM bin_test")
        # Data is already here!
        self.assertEquals(result.get_one(), (1, ))
        connection.rollback()
Example #16
    def __init__(self, market_db):
        self.database = create_database('sqlite:' + market_db)
        self.store = MarketStore(self.database)

        with open(os.path.join(BASE_DIR, 'database', 'schema.sql')) as fp:
            schema = fp.read()
        for cmd in schema.split(';'):
            self.store.execute(cmd)
Example #17
 def _get_store(self):
     if self.store is not None:
         return self.store
     db_dir_path = os.path.join(self.path, "db")
     if not os.path.isdir(db_dir_path):
         os.mkdir(db_dir_path)
     db_path = os.path.join(db_dir_path, "hostdb.sqlite")
     db = create_database("sqlite:%s?timeout=%f" % (db_path, self.timeout))
     self.store = Store(db)
     setup_schema(self.store)
     return self.store
Example #18
 def _get_store(self):
     if self.store is not None:
         return self.store
     db_dir_path = os.path.join(self.path, "db")
     if not os.path.isdir(db_dir_path):
         os.mkdir(db_dir_path)
     db_path = os.path.join(db_dir_path, "hostdb.sqlite")
     db = create_database("sqlite:%s?timeout=%f" % (db_path, self.timeout))
     self.store = Store(db)
     setup_schema(self.store)
     return self.store
Example #19
 def is_supported(self):
     uri = os.environ.get("STORM_POSTGRES_URI")
     if not uri:
         return False
     global _max_prepared_transactions
     if _max_prepared_transactions is None:
         database = create_database(uri)
         connection = database.connect()
         result = connection.execute("SHOW MAX_PREPARED_TRANSACTIONS")
         _max_prepared_transactions = int(result.get_one()[0])
         connection.close()
     return _max_prepared_transactions > 0
Example #20
    def setUp(self):
        TestHelper.setUp(self)

        # Allow classes with the same name in different tests to resolve
        # property path strings properly.
        SQLObjectBase._storm_property_registry.clear()

        self.store = Store(create_database("sqlite:"))

        class SQLObject(SQLObjectBase):
            @staticmethod
            def _get_store():
                return self.store

        self.SQLObject = SQLObject

        self.store.execute("CREATE TABLE person "
                           "(id INTEGER PRIMARY KEY, name TEXT, age INTEGER,"
                           " ts TIMESTAMP, delta INTERVAL,"
                           " address_id INTEGER)")
        self.store.execute("INSERT INTO person VALUES "
                           "(1, 'John Joe', 20, '2007-02-05 19:53:15',"
                           " '1 day, 12:34:56', 1)")
        self.store.execute("INSERT INTO person VALUES "
                           "(2, 'John Doe', 20, '2007-02-05 20:53:15',"
                           " '42 days 12:34:56.78', 2)")

        self.store.execute("CREATE TABLE address "
                           "(id INTEGER PRIMARY KEY, city TEXT)")
        self.store.execute("INSERT INTO address VALUES (1, 'Curitiba')")
        self.store.execute("INSERT INTO address VALUES (2, 'Sao Carlos')")

        self.store.execute("CREATE TABLE phone "
                           "(id INTEGER PRIMARY KEY, person_id INTEGER,"
                           "number TEXT)")
        self.store.execute("INSERT INTO phone VALUES (1, 2, '1234-5678')")
        self.store.execute("INSERT INTO phone VALUES (2, 1, '8765-4321')")
        self.store.execute("INSERT INTO phone VALUES (3, 2, '8765-5678')")

        self.store.execute("CREATE TABLE person_phone "
                           "(id INTEGER PRIMARY KEY, person_id INTEGER, "
                           "phone_id INTEGER)")
        self.store.execute("INSERT INTO person_phone VALUES (1, 2, 1)")
        self.store.execute("INSERT INTO person_phone VALUES (2, 2, 2)")
        self.store.execute("INSERT INTO person_phone VALUES (3, 1, 1)")

        class Person(self.SQLObject):
            _defaultOrder = "-Person.name"
            name = StringCol()
            age = IntCol()
            ts = UtcDateTimeCol()

        self.Person = Person
Example #21
 def is_supported(self):
     uri = os.environ.get("STORM_POSTGRES_URI")
     if not uri:
         return False
     global _max_prepared_transactions
     if _max_prepared_transactions is None:
         database = create_database(uri)
         connection = database.connect()
         result = connection.execute("SHOW MAX_PREPARED_TRANSACTIONS")
         _max_prepared_transactions = int(result.get_one()[0])
         connection.close()
     return _max_prepared_transactions > 0
Example #22
    def setUp(self):
        TestHelper.setUp(self)

        # Allow classes with the same name in different tests to resolve
        # property path strings properly.
        SQLObjectBase._storm_property_registry.clear()

        self.store = Store(create_database("sqlite:"))
        class SQLObject(SQLObjectBase):
            @staticmethod
            def _get_store():
                return self.store

        self.SQLObject = SQLObject

        self.store.execute("CREATE TABLE person "
                           "(id INTEGER PRIMARY KEY, name TEXT, age INTEGER,"
                           " ts TIMESTAMP, delta INTERVAL,"
                           " address_id INTEGER)")
        self.store.execute("INSERT INTO person VALUES "
                           "(1, 'John Joe', 20, '2007-02-05 19:53:15',"
                           " '1 day, 12:34:56', 1)")
        self.store.execute("INSERT INTO person VALUES "
                           "(2, 'John Doe', 20, '2007-02-05 20:53:15',"
                           " '42 days 12:34:56.78', 2)")

        self.store.execute("CREATE TABLE address "
                           "(id INTEGER PRIMARY KEY, city TEXT)")
        self.store.execute("INSERT INTO address VALUES (1, 'Curitiba')")
        self.store.execute("INSERT INTO address VALUES (2, 'Sao Carlos')")

        self.store.execute("CREATE TABLE phone "
                           "(id INTEGER PRIMARY KEY, person_id INTEGER,"
                           "number TEXT)")
        self.store.execute("INSERT INTO phone VALUES (1, 2, '1234-5678')")
        self.store.execute("INSERT INTO phone VALUES (2, 1, '8765-4321')")
        self.store.execute("INSERT INTO phone VALUES (3, 2, '8765-5678')")

        self.store.execute("CREATE TABLE person_phone "
                           "(id INTEGER PRIMARY KEY, person_id INTEGER, "
                           "phone_id INTEGER)")
        self.store.execute("INSERT INTO person_phone VALUES (1, 2, 1)")
        self.store.execute("INSERT INTO person_phone VALUES (2, 2, 2)")
        self.store.execute("INSERT INTO person_phone VALUES (3, 1, 1)")

        class Person(self.SQLObject):
            _defaultOrder = "-Person.name"
            name = StringCol()
            age = IntCol()
            ts = UtcDateTimeCol()

        self.Person = Person
Example #23
 def test_timeout(self):
     database = create_database("sqlite:%s?timeout=0.3" % self.get_path())
     connection1 = database.connect()
     connection2 = database.connect()
     connection1.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)")
     connection1.commit()
     connection1.execute("INSERT INTO test VALUES (1)")
     started = time.time()
     try:
         connection2.execute("INSERT INTO test VALUES (2)")
     except OperationalError, exception:
         self.assertEquals(str(exception), "database is locked")
         self.assertTrue(time.time()-started >= 0.3)
Example #24
 def __init__(self, db="sqlite"):
     uname = "root"
     passw = ""
     if db == "postgres":
         passw = "root"
     elif db == "sqlite":
         expr = "sqlite:"
     if db != "sqlite":
         expr = "{db}://{usern}:{passw}@localhost/test".format(
             db=db, usern=uname, passw=passw)
     self.database = create_database(expr)
     self.store = Store(self.database)
     #self.store.execute("DROP TABLE users")
     self.store.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, login VARCHAR(8), userid INTEGER, projid INTEGER)")
Example #25
 def test_timeout(self):
     database = create_database("sqlite:%s?timeout=0.3" % self.get_path())
     connection1 = database.connect()
     connection2 = database.connect()
     connection1.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)")
     connection1.commit()
     connection1.execute("INSERT INTO test VALUES (1)")
     started = time.time()
     try:
         connection2.execute("INSERT INTO test VALUES (2)")
     except OperationalError, exception:
         self.assertEquals(str(exception), "database is locked")
         self.assertTrue(time.time() - started >= 0.3)
Example #26
    def setUp(self):
        super(BuildFarmTestCase, self).setUp()
        self.path = tempfile.mkdtemp()

        for subdir in ["data", "data/upload", "data/oldrevs", "db", "web", "lcov", "lcov/data"]:
            os.mkdir(os.path.join(self.path, subdir))

        self.db_url = "sqlite:"+os.path.join(self.path, "db", "hostdb.sqlite")
        db = database.create_database(self.db_url)
        store = Store(db)
        setup_schema(store)
        store.commit()
        self.write_compilers([])
        self.write_hosts({})
Example #27
    def __init__(self):
        GladeDelegate.__init__(self,
                               gladefile="interface.ui",
                               delete_handler=self.quit_if_last)
        self.proxy = None
        self.db = create_database("sqlite:laps.sqlite")
        self.store = Store(self.db)

        self.race = self._check_race()
        self.race_proxy = self.add_proxy(self.race, self.race_widgets)

        self.register_validate_function(self._validation_changed)
        self._check_categories()
        self.setup_widgets()
Example #28
    def __init__(self):
        GladeDelegate.__init__(self,
                               gladefile="interface.ui",
                               delete_handler=self.quit_if_last)
        self.proxy = None
        self.db = create_database("sqlite:laps.sqlite")
        self.store = Store(self.db)

        self.race = self._check_race()
        self.race_proxy = self.add_proxy(self.race, self.race_widgets)

        self.register_validate_function(self._validation_changed)
        self._check_categories()
        self.setup_widgets()
Example #29
 def test_timeout(self):
     database = create_database("sqlite:%s?timeout=0.3" % self.get_path())
     connection1 = database.connect()
     connection2 = database.connect()
     connection1.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)")
     connection1.commit()
     connection1.execute("INSERT INTO test VALUES (1)")
     started = time.time()
     try:
         connection2.execute("INSERT INTO test VALUES (2)")
     except OperationalError as exception:
         assert ustr(exception) == "database is locked"
         assert time.time() - started >= 0.3
     else:
         self.fail("OperationalError not raised")
Example #30
 def _get_store_internal(self, dbname):
     from stoqlib.database.runtime import StoqlibStore
     uri = self._create_uri(dbname)
     try:
         self._log_connect(uri)
         store = StoqlibStore(create_database(uri))
     except OperationalError as e:
         log.info('OperationalError: %s' % e)
         raise DatabaseError(e.args[0])
     except Exception as e:
         value = sys.exc_info()[1]
         raise DatabaseError(
             _("Could not connect to %s database. The error message is "
               "'%s'. Please fix the connection settings you have set "
               "and try again.") % (DEFAULT_RDBMS, value))
     return store
Example #31
 def _get_store_internal(self, dbname):
     from stoqlib.database.runtime import StoqlibStore
     uri = self._create_uri(dbname)
     try:
         if uri.host == "":
             pair = test_local_database()
             if pair is None:
                 raise DatabaseError(
                     _("Could not find a database server on this computer"))
             uri.host = pair[0]
             uri.port = int(pair[1])
         self._log_connect(uri)
         store = StoqlibStore(create_database(uri))
     except OperationalError, e:
         log.info('OperationalError: %s' % e)
         raise DatabaseError(e.args[0])
Example #32
 def _get_store_internal(self, dbname):
     from stoqlib.database.runtime import StoqlibStore
     uri = self._create_uri(dbname)
     try:
         self._log_connect(uri)
         store = StoqlibStore(create_database(uri))
     except OperationalError as e:
         log.info('OperationalError: %s' % e)
         raise DatabaseError(e.args[0])
     except Exception as e:
         value = sys.exc_info()[1]
         raise DatabaseError(
             _("Could not connect to %s database. The error message is "
               "'%s'. Please fix the connection settings you have set "
               "and try again.") % (DEFAULT_RDBMS, value))
     return store
Example #33
 def setUp(self):
     super(PostgresDisconnectionTestWithPGBouncerBase, self).setUp()
     database_uri = URI(os.environ["STORM_POSTGRES_HOST_URI"])
     database_user = database_uri.username or os.environ['USER']
     database_dsn = make_dsn(database_uri)
     # Create a pgbouncer fixture.
     self.pgbouncer = pgbouncer.fixture.PGBouncerFixture()
     self.pgbouncer.databases[database_uri.database] = database_dsn
     self.pgbouncer.users[database_user] = "trusted"
     self.pgbouncer.admin_users = [database_user]
     self.useFixture(self.pgbouncer)
     # Create a Database that uses pgbouncer.
     pgbouncer_uri = database_uri.copy()
     pgbouncer_uri.host = self.pgbouncer.host
     pgbouncer_uri.port = self.pgbouncer.port
     self.database = create_database(pgbouncer_uri)
Example #34
 def setUp(self):
     super(PostgresDisconnectionTestWithPGBouncerBase, self).setUp()
     database_uri = URI(os.environ["STORM_POSTGRES_HOST_URI"])
     database_user = database_uri.username or os.environ['USER']
     database_dsn = make_dsn(database_uri)
     # Create a pgbouncer fixture.
     self.pgbouncer = pgbouncer.fixture.PGBouncerFixture()
     self.pgbouncer.databases[database_uri.database] = database_dsn
     self.pgbouncer.users[database_user] = "trusted"
     self.pgbouncer.admin_users = [database_user]
     self.useFixture(self.pgbouncer)
     # Create a Database that uses pgbouncer.
     pgbouncer_uri = database_uri.copy()
     pgbouncer_uri.host = self.pgbouncer.host
     pgbouncer_uri.port = self.pgbouncer.port
     self.database = create_database(pgbouncer_uri)
Example #35
    def test_isolation_autocommit(self):
        database = create_database(
            os.environ["STORM_POSTGRES_URI"] + "?isolation=autocommit")

        connection = database.connect()
        self.addCleanup(connection.close)

        result = connection.execute("SHOW TRANSACTION ISOLATION LEVEL")
        # It matches read committed in Postgres internally
        self.assertEquals(result.get_one()[0], u"read committed")

        connection.execute("INSERT INTO bin_test VALUES (1, 'foo')")

        result = self.connection.execute("SELECT id FROM bin_test")
        # I didn't commit, but data should already be there
        self.assertEquals(result.get_all(), [(1,)])
        connection.rollback()
Example #36
    def test_isolation_autocommit(self):
        database = create_database(os.environ["STORM_POSTGRES_URI"] +
                                   "?isolation=autocommit")

        connection = database.connect()
        self.addCleanup(connection.close)

        result = connection.execute("SHOW TRANSACTION ISOLATION LEVEL")
        # It matches read committed in Postgres internally
        self.assertEquals(result.get_one()[0], u"read committed")

        connection.execute("INSERT INTO bin_test VALUES (1, 'foo')")

        result = self.connection.execute("SELECT id FROM bin_test")
        # I didn't commit, but data should already be there
        self.assertEquals(result.get_all(), [(1, )])
        connection.rollback()
Example #37
def main():
    db = create_database("sqlite:laps.sqlite")
    store = Store(db)

    racers = store.find(Racer)
    print 'Categoria,Número,Nome,L1,L2,L3,L4,L5,L6,L7,L8,Total'
    for r in racers:
        data = [r.category.name, r.number, r.name]
        #print r.number, r.name
        for i, lap in enumerate(list(r.get_laps()), 1):
            assert i == lap.lap_number
            #print '  ', i, lap.lap_number, lap.lap_time, lap
            #data.append(str(lap.lap_time))
            data.append(lap.lap_time.seconds)

        data.extend([0] * (11 - len(data)))
        data.append(r.total_time)
        print ','.join(str(i) for i in data)
Example #38
    def setUp(self):
        super(BuildFarmTestCase, self).setUp()
        self.path = tempfile.mkdtemp()

        for subdir in [
                "data", "data/upload", "data/oldrevs", "db", "web", "lcov",
                "lcov/data"
        ]:
            os.mkdir(os.path.join(self.path, subdir))

        self.db_url = "sqlite:" + os.path.join(self.path, "db",
                                               "hostdb.sqlite")
        db = database.create_database(self.db_url)
        store = Store(db)
        setup_schema(store)
        store.commit()
        self.write_compilers([])
        self.write_hosts({})
Example #39
    def test_isolation_serializable(self):
        database = create_database(
            os.environ["STORM_POSTGRES_URI"] + "?isolation=serializable")

        connection = database.connect()
        self.addCleanup(connection.close)

        result = connection.execute("SHOW TRANSACTION ISOLATION LEVEL")
        self.assertEquals(result.get_one()[0], u"serializable")

        # Start a transaction
        result = connection.execute("SELECT 1")
        self.assertEquals(result.get_one(), (1,))

        self.connection.execute("INSERT INTO bin_test VALUES (1, 'foo')")
        self.connection.commit()

        result = connection.execute("SELECT id FROM bin_test")
        # We can't see data yet, because transaction started before
        self.assertEquals(result.get_one(), None)
        connection.rollback()
Example #40
    def test_isolation_serializable(self):
        database = create_database(os.environ["STORM_POSTGRES_URI"] +
                                   "?isolation=serializable")

        connection = database.connect()
        self.addCleanup(connection.close)

        result = connection.execute("SHOW TRANSACTION ISOLATION LEVEL")
        self.assertEquals(result.get_one()[0], u"serializable")

        # Start a transaction
        result = connection.execute("SELECT 1")
        self.assertEquals(result.get_one(), (1, ))

        self.connection.execute("INSERT INTO bin_test VALUES (1, 'foo')")
        self.connection.commit()

        result = connection.execute("SELECT id FROM bin_test")
        # We can't see data yet, because transaction started before
        self.assertEquals(result.get_one(), None)
        connection.rollback()
Example #41
def perform_data_update(dbfile):
    store = Store(create_database(GLSettings.make_db_uri(dbfile)))

    enabled_languages = [lang.name for lang in store.find(l10n.EnabledLanguage)]

    removed_languages = list(set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES))

    if len(removed_languages):
        removed_languages.sort()
        removed_languages = ', '.join(removed_languages)
        raise Exception("FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
                        "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop" % removed_languages)


    try:
        db_perform_data_update(store)
        store.commit()
    except:
        store.rollback()
        raise
    finally:
        store.close()
Example #42
 def _get_store_internal(self, dbname):
     from stoqlib.database.runtime import StoqlibStore
     uri = self._create_uri(dbname)
     try:
         if uri.host == "":
             pair = test_local_database()
             if pair is None:
                 raise DatabaseError(
                     _("Could not find a database server on this computer"))
             uri.host = pair[0]
             uri.port = int(pair[1])
         self._log_connect(uri)
         store = StoqlibStore(create_database(uri))
     except OperationalError as e:
         log.info('OperationalError: %s' % e)
         raise DatabaseError(e.args[0])
     except Exception as e:
         value = sys.exc_info()[1]
         raise DatabaseError(
             _("Could not connect to %s database. The error message is "
               "'%s'. Please fix the connection settings you have set "
               "and try again.") % (DEFAULT_RDBMS, value))
     return store
Example #43
    def _wrap(self, function, *args, **kwargs):
        """
        Wrap provided function calling it inside a thread and
        passing the store to it.
        """
        with transact_lock:
            store = Store(create_database(GLSettings.db_uri))

            try:
                if self.instance:
                    result = function(self.instance, store, *args, **kwargs)
                else:
                    result = function(store, *args, **kwargs)

                store.commit()
            except:
                store.rollback()
                raise
            else:
                return result
            finally:
                store.reset()
                store.close()
Example #44
 def _get_store_internal(self, dbname):
     from stoqlib.database.runtime import StoqlibStore
     uri = self._create_uri(dbname)
     try:
         if uri.host == "":
             pair = test_local_database()
             if pair is None:
                 raise DatabaseError(
                     _("Could not find a database server on this computer"))
             uri.host = pair[0]
             uri.port = int(pair[1])
         self._log_connect(uri)
         store = StoqlibStore(create_database(uri))
     except OperationalError as e:
         log.info('OperationalError: %s' % e)
         raise DatabaseError(e.args[0])
     except Exception as e:
         value = sys.exc_info()[1]
         raise DatabaseError(
             _("Could not connect to %s database. The error message is "
               "'%s'. Please fix the connection settings you have set "
               "and try again.") % (DEFAULT_RDBMS, value))
     return store
Example #45
 def test_wb_create_database(self):
     filename = self.make_path()
     database = create_database("sqlite:%s" % filename)
     self.assertTrue(isinstance(database, SQLite))
     self.assertEquals(database._filename, filename)
Example #46
 def test_wb_create_database(self):
     database = create_database("postgres://*****:*****@ht:12/db")
     self.assertTrue(isinstance(database, Postgres))
     self.assertEquals(database._dsn,
                       "dbname=db host=ht port=12 user=un password=pw")
Example #47
def get_store(database_uri):
    database = create_database(database_uri)
    store = Store(database)
    return store
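A possible call site for the helper above, purely for illustration; the file path, timeout value and table are assumptions, reusing the sqlite URI options that appear in the timeout tests elsewhere on this page.

# Hypothetical usage of get_store(); the path, timeout and table name are assumptions.
store = get_store("sqlite:/tmp/example.db?timeout=5")
store.execute("CREATE TABLE IF NOT EXISTS notes (id INTEGER PRIMARY KEY, body TEXT)")
store.commit()
store.close()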
Example #48
 def test_wb_create_database(self):
     database = create_database("sqlite:")
     self.assertTrue(isinstance(database, SQLite))
     self.assertEquals(database._filename, ":memory:")
Example #49
 def setUp(self):
     super(TimeoutTracerWithDBTest, self).setUp()
     self.tracer = StuckInTimeTimeoutTracer(10)
     install_tracer(self.tracer)
     database = create_database(os.environ["STORM_POSTGRES_URI"])
     self.connection = database.connect()
Example #50
 def test_wb_create_database(self):
     database = create_database("postgres://*****:*****@ht:12/db")
     self.assertTrue(isinstance(database, Postgres))
     self.assertEquals(database._dsn,
                       "dbname=db host=ht port=12 user=un password=pw")
Example #51
 def create_database(self):
     self.database = create_database(os.environ["STORM_POSTGRES_URI"])
Example #52
 def create_database(self):
     self.database = create_database(os.environ["STORM_POSTGRES_URI"])
Example #53
def get_store(database_uri):
	database = create_database(database_uri)
	store = Store(database)
	return store
Example #54
 def test_wb_create_database(self):
     filename = self.make_path()
     database = create_database("sqlite:%s" % filename)
     self.assertTrue(isinstance(database, SQLite))
     self.assertEqual(database._filename, filename)
Example #55
 def test_wb_create_database(self):
     database = create_database("sqlite:")
     self.assertTrue(isinstance(database, SQLite))
     self.assertEqual(database._filename, ":memory:")
Example #56
 def setUp(self):
     super(PostgresDisconnectionTestWithoutProxyBase, self).setUp()
     self.database = create_database(self.database_uri)
Example #57
def perform_schema_migration(version):
    """
    @param version:
    @return:
    """
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        GLSettings.print_msg(
            "Migrations from DB version lower than %d are no longer supported!"
            % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    tmpdir = os.path.abspath(os.path.join(GLSettings.db_path, 'tmp'))
    orig_db_file = os.path.abspath(
        os.path.join(GLSettings.db_path, 'glbackend-%d.db' % version))
    final_db_file = os.path.abspath(
        os.path.join(GLSettings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))

    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy2(orig_db_file, tmpdir)

    new_db_file = None

    try:
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(
                os.path.join(tmpdir, 'glbackend-%d.db' % version))
            new_db_file = os.path.abspath(
                os.path.join(tmpdir, 'glbackend-%d.db' % (version + 1)))

            GLSettings.db_file = new_db_file
            GLSettings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            GLSettings.print_msg("Updating DB from version %d to version %d" %
                                 (version, version + 1))

            store_old = Store(create_database('sqlite:' + old_db_file))
            store_new = Store(create_database('sqlite:' + new_db_file))

            # Here is instanced the migration script
            MigrationModule = importlib.import_module(
                "globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(
                migration_mapping, version, store_old, store_new)

            GLSettings.print_msg("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    GLSettings.print_msg(
                        "Failure while executing migration prologue: %s" %
                        exception)
                    raise exception

                for model_name, _ in migration_mapping.iteritems():
                    if migration_script.model_from[
                            model_name] is not None and migration_script.model_to[
                                model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            GLSettings.print_msg(
                                "Failure while migrating table %s: %s " %
                                (model_name, exception))
                            raise exception
                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    GLSettings.print_msg(
                        "Failure while executing migration epilogue: %s " %
                        exception)
                    raise exception

            finally:
                # the database should be always closed before leaving the application
                # in order to not keep leaking journal files.
                migration_script.close()

            GLSettings.print_msg("Migration stats:")

            # we open a new db in order to verify integrity of the generated file
            store_verify = Store(
                create_database(GLSettings.make_db_uri(new_db_file)))

            for model_name, _ in migration_mapping.iteritems():
                if model_name == 'ApplicationData':
                    continue

                if migration_script.model_from[
                        model_name] is not None and migration_script.model_to[
                            model_name] is not None:
                    count = store_verify.find(
                        migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" % \
                                                 (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            GLSettings.print_msg(" * %s table migrated (entries count changed from %d to %d)" % \
                                                 (model_name, migration_script.entries_count[model_name], count))
                    else:
                        GLSettings.print_msg(" * %s table migrated (%d entry(s))" % \
                                             (model_name, migration_script.entries_count[model_name]))

            version += 1

            store_verify.close()

        perform_data_update(new_db_file)
    except Exception as exception:
        # simply propagate the exception
        raise exception

    else:
        # in case of success first copy the new migrated db, then as last action delete the original db file
        shutil.copy(new_db_file, final_db_file)
        security.overwrite_and_remove(orig_db_file)

    finally:
        # Always cleanup the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            tmp_db_file = os.path.join(tmpdir, f)
            security.overwrite_and_remove(tmp_db_file)
        shutil.rmtree(tmpdir)
Example #58
 def setUp(self):
     super(PostgresDisconnectionTestWithoutProxyBase, self).setUp()
     self.database = create_database(self.database_uri)