def create(self, name, uri=None):
    """Create, register and return a new store called C{name}.

    @param uri: Optionally, the URI to use.

    @raises ZStormError: Raised if C{uri} is None and no default URI
        exists for C{name}.  Also raised if a store with C{name}
        already exists.
    """
    if uri is not None:
        db = self._get_database(uri)
    else:
        # No explicit URI: fall back to the default database for this name.
        db = self._default_databases.get(name)
        if db is None:
            raise ZStormError("Store named '%s' not found" % name)
    if name is not None and self._named.get(name) is not None:
        raise ZStormError("Store named '%s' already exists" % name)
    store = Store(db)
    store._register_for_txn = True
    store._tpc = self._default_tpcs.get(name, False)
    # Use a weakref so the hook does not keep this ZStorm alive.
    store._event.hook(
        "register-transaction",
        register_store_with_transaction,
        weakref.ref(self))
    self._stores[id(store)] = store
    if name is not None:
        self._named[name] = store
        self._name_index[store] = name
    return store
def perform_data_update(dbfile):
    """Run the post-migration data update against the DB at *dbfile*.

    @param dbfile: path to the database file to update.
    @raises Exception: if any enabled language is no longer supported,
        or if the update itself fails (the transaction is rolled back).

    Fix over the original: the store is now opened before the try block
    and always closed in the finally clause — previously the
    unsupported-language Exception was raised outside the try, leaking
    the open store.
    """
    store = Store(create_database(GLSettings.make_db_uri(dbfile)))

    try:
        enabled_languages = [lang.name for lang in store.find(l10n.EnabledLanguage)]

        removed_languages = list(set(enabled_languages) - set(LANGUAGES_SUPPORTED_CODES))

        if removed_languages:
            removed_languages.sort()
            removed_languages = ', '.join(removed_languages)
            raise Exception(
                "FATAL: cannot complete the upgrade because the support for some of the enabled languages is currently incomplete (%s)\n"
                "Read about how to handle this condition at: https://github.com/globaleaks/GlobaLeaks/wiki/Upgrade-Guide#lang-drop"
                % removed_languages)

        db_perform_data_update(store)
        store.commit()
    except:
        # Bare except is deliberate: roll back on any exception (including
        # BaseException) before re-raising.
        store.rollback()
        raise
    finally:
        store.close()
class UserStore(borg.Borg):
    # Borg-style shared holder for a single process-wide Storm store.
    # The connection is attempted once, at class-definition (import) time;
    # if it fails, _store is left as None and callers of get_store() must
    # cope with a missing store.
    try:
        _store = Store(Database().get_database())
    except DatabaseException:
        _store = None

    def get_store(self):
        # Return the shared store, or None if the initial connection failed.
        return self._store
def _get_store(self):
    """Return the host-DB store, lazily creating the DB and schema on first use."""
    if self.store is None:
        # First access: make sure <path>/db exists, then open the SQLite file.
        directory = os.path.join(self.path, "db")
        if not os.path.isdir(directory):
            os.mkdir(directory)
        sqlite_file = os.path.join(directory, "hostdb.sqlite")
        uri = "sqlite:%s?timeout=%f" % (sqlite_file, self.timeout)
        self.store = Store(create_database(uri))
        setup_schema(self.store)
    return self.store
def test_block_access_with_multiple_stores(self):
    """
    If multiple L{Store}s are passed to L{block_access} they will all be
    blocked until the managed context is left.
    """
    other_store = Store(SQLite(URI("sqlite:%s" % self.make_path())))
    with block_access(self.store, other_store):
        # Inside the context, every passed store must refuse queries.
        for blocked in (self.store, other_store):
            self.assertRaises(
                ConnectionBlockedError, blocked.execute, "SELECT 1")
def setUp(self): TestHelper.setUp(self) # Allow classes with the same name in different tests to resolve # property path strings properly. SQLObjectBase._storm_property_registry.clear() self.store = Store(create_database("sqlite:")) class SQLObject(SQLObjectBase): @staticmethod def _get_store(): return self.store self.SQLObject = SQLObject self.store.execute("CREATE TABLE person " "(id INTEGER PRIMARY KEY, name TEXT, age INTEGER," " ts TIMESTAMP, delta INTERVAL," " address_id INTEGER)") self.store.execute("INSERT INTO person VALUES " "(1, 'John Joe', 20, '2007-02-05 19:53:15'," " '1 day, 12:34:56', 1)") self.store.execute("INSERT INTO person VALUES " "(2, 'John Doe', 20, '2007-02-05 20:53:15'," " '42 days 12:34:56.78', 2)") self.store.execute("CREATE TABLE address " "(id INTEGER PRIMARY KEY, city TEXT)") self.store.execute("INSERT INTO address VALUES (1, 'Curitiba')") self.store.execute("INSERT INTO address VALUES (2, 'Sao Carlos')") self.store.execute("CREATE TABLE phone " "(id INTEGER PRIMARY KEY, person_id INTEGER," "number TEXT)") self.store.execute("INSERT INTO phone VALUES (1, 2, '1234-5678')") self.store.execute("INSERT INTO phone VALUES (2, 1, '8765-4321')") self.store.execute("INSERT INTO phone VALUES (3, 2, '8765-5678')") self.store.execute("CREATE TABLE person_phone " "(id INTEGER PRIMARY KEY, person_id INTEGER, " "phone_id INTEGER)") self.store.execute("INSERT INTO person_phone VALUES (1, 2, 1)") self.store.execute("INSERT INTO person_phone VALUES (2, 2, 2)") self.store.execute("INSERT INTO person_phone VALUES (3, 1, 1)") class Person(self.SQLObject): _defaultOrder = "-Person.name" name = StringCol() age = IntCol() ts = UtcDateTimeCol() self.Person = Person
def __init__(self):
    """Build the main window from the Glade file and wire up the race DB."""
    # quit_if_last: closing the last window terminates the application.
    GladeDelegate.__init__(self, gladefile="interface.ui",
                           delete_handler=self.quit_if_last)
    self.proxy = None
    # Local SQLite database holding race/lap data.
    self.db = create_database("sqlite:laps.sqlite")
    self.store = Store(self.db)
    # Resolve the current race (see _check_race) and bind it to the widgets.
    self.race = self._check_race()
    self.race_proxy = self.add_proxy(self.race, self.race_widgets)
    self.register_validate_function(self._validation_changed)
    self._check_categories()
    self.setup_widgets()
def __init__(self, server, connString):
    """Local agent that tracks 'load' agents and their resources.

    @param server: owning server; localAgents[0]/[1] are taken as the
        root and user agents respectively.
    @param connString: Storm connection string for the backing database.
    """
    TSLocalAgent.__init__(self, server)

    self.rootAgent = server.localAgents[0]
    self.userAgent = server.localAgents[1]

    self.database = create_database(connString)
    self.dbStore = Store(self.database)

    # Be notified when 'load' agents register or disconnect.
    self.server.listenerAgents.append(
        AgentListener('load', self.onAgentRegister, self.onAgentDisconnect))

    # Currently-connected load agents (populated by the callbacks above).
    self.loadAgents = {}

    self.resourceManager = ResourceManager(self.dbStore)
def createExpsvcDB(connString):
    """Create all experiment-service tables in the database at *connString*."""
    database = create_database(connString)
    store = Store(database)

    # Creation order preserved: referenced tables before referencing ones.
    models = (Agent,
              AgentResource,
              AgentResourceChild,
              WorkloadType,
              WorkloadParam,
              ExperimentProfile,
              ExperimentThreadPool,
              ExperimentWorkload,
              ExperimentWorkloadResource)
    for model in models:
        TableSchema(database, model).create(store)

    store.commit()
    store.close()
def connect(self):
    """Attach a Storm store to the current cherrypy thread (at most once)."""
    global storm_stores
    if hasattr(cherrypy.thread_data, "store"):
        return  # this thread already has a store
    dburi = config.get('storm.dburi')
    database = create_database(dburi)
    try:
        store = Store(database)
    except Exception:
        # Without a database the application cannot run: log and shut down.
        log.error("Unable to connect to database: %s" % dburi)
        cherrypy.engine.exit()
        return
    storm_stores.append(store)
    cherrypy.thread_data.store = store
def __init__(self, server, connString):
    """Local agent that services user authentication requests.

    @param server: owning server; localAgents[0] is taken as the root agent.
    @param connString: Storm connection string for the backing database.
    """
    TSLocalAgent.__init__(self, server)

    self.client.getId()

    self.logger = logging.getLogger('UserAgent')

    self.rootAgent = server.localAgents[0]
    # Connected users keyed per agent (filled in elsewhere).
    self.agentUsers = {}

    # Pluggable auth backends; only local auth is registered by default.
    self.authServices = {'local': LocalAuth()}

    self.database = create_database(connString)
    self.dbStore = Store(self.database)

    # Register a listener flow for 'authUser' commands sent to userAgentId.
    self.server.listenerFlows.append(
        Flow(dstAgentId=userAgentId, command='authUser'))
def main(): db = create_database("sqlite:laps.sqlite") store = Store(db) racers = store.find(Racer) print 'Categoria,Número,Nome,L1,L2,L3,L4,L5,L6,L7,L8,Total' for r in racers: data = [r.category.name, r.number, r.name] #print r.number, r.name for i, lap in enumerate(list(r.get_laps()), 1): assert i == lap.lap_number #print ' ', i, lap.lap_number, lap.lap_time, lap #data.append(str(lap.lap_time)) data.append(lap.lap_time.seconds) data.extend([0] * (11 - len(data))) data.append(r.total_time) print ','.join(str(i) for i in data)
def connect_db(thread_index):
    """Create a Storm store for *thread_index* and attach it to cherrypy."""
    global storm_stores
    dburi = config.get('storm.dburi')
    database = create_database(dburi)
    try:
        store = Store(database)
    except Exception:
        # Without a database the application cannot run: log and shut down.
        log.error("Unable to connect to database: %s" % dburi)
        cherrypy.engine.exit()
        return
    # Apply the configured session time zone, if any.
    timezone = config.get("storm.timezone")
    if timezone:
        store.execute(SQL("SET time_zone=?", (timezone, )))
    storm_stores[thread_index] = store
    cherrypy.thread_data.store = store
def setUp(self):
    """Create a scratch build-farm directory tree and an empty host DB."""
    super(BuildFarmTestCase, self).setUp()
    self.path = tempfile.mkdtemp()

    # Directory layout the build-farm code expects under the root.
    for subdir in [
            "data", "data/upload", "data/oldrevs", "db", "web", "lcov",
            "lcov/data"
    ]:
        os.mkdir(os.path.join(self.path, subdir))

    self.db_url = "sqlite:" + os.path.join(self.path, "db", "hostdb.sqlite")
    db = database.create_database(self.db_url)
    store = Store(db)
    setup_schema(store)
    store.commit()

    # Start from an empty configuration: no compilers, no hosts.
    self.write_compilers([])
    self.write_hosts({})
def test_json_property(self):
    """The JSON property is encoded as JSON"""

    class TestModel(object):
        __storm_table__ = "json_test"
        id = Int(primary=True)
        json = JSON()

    connection = self.database.connect()
    value = {"a": 3, "b": "foo", "c": None}
    # json.dumps returns a byte str on Python 2; decode to unicode before
    # inserting so the column holds text (this .decode breaks on Python 3).
    db_value = json.dumps(value).decode("utf-8")
    # Insert the raw JSON text directly, bypassing the Storm property.
    connection.execute("INSERT INTO json_test (json) VALUES (?)", (db_value, ))
    connection.commit()
    store = Store(self.database)
    obj = store.find(TestModel).one()
    # NOTE(review): obj.json is read after store.close() — presumably the
    # value is already loaded by then; confirm before reordering.
    store.close()
    # The JSON object is decoded to python
    self.assertEqual(value, obj.json)
def _wrap(self, function, *args, **kwargs):
    """
    Run *function* under the global transaction lock with a fresh store,
    committing on success and rolling back on any error.
    """
    with transact_lock:
        store = Store(create_database(GLSettings.db_uri))
        try:
            # Prepend the bound instance (if any) and the store to the args.
            if self.instance:
                call_args = (self.instance, store) + args
            else:
                call_args = (store,) + args
            result = function(*call_args, **kwargs)
            store.commit()
        except:  # bare except kept deliberately: roll back on BaseException too
            store.rollback()
            raise
        else:
            return result
        finally:
            store.reset()
            store.close()
def perform_schema_migration(version):
    """Migrate the GlobaLeaks DB from *version* up to DATABASE_VERSION.

    Each step copies the DB into a temp dir, runs the per-version migration
    script, verifies row counts, and only on full success installs the
    migrated file and securely removes the original.

    @param version: the schema version of the existing database file.
    @raises AssertionError: when a table's migrated row count mismatches
        and the migration declares that a fatal condition.
    """
    to_delete_on_fail = []
    to_delete_on_success = []

    if version < FIRST_DATABASE_VERSION_SUPPORTED:
        GLSettings.print_msg(
            "Migrations from DB version lower than %d are no longer supported!"
            % FIRST_DATABASE_VERSION_SUPPORTED)
        quit()

    # Work on copies inside a temporary directory; the original file is
    # only replaced after the whole chain of migrations succeeds.
    tmpdir = os.path.abspath(os.path.join(GLSettings.db_path, 'tmp'))
    orig_db_file = os.path.abspath(
        os.path.join(GLSettings.db_path, 'glbackend-%d.db' % version))
    final_db_file = os.path.abspath(
        os.path.join(GLSettings.db_path, 'glbackend-%d.db' % DATABASE_VERSION))

    shutil.rmtree(tmpdir, True)
    os.mkdir(tmpdir)
    shutil.copy2(orig_db_file, tmpdir)

    new_db_file = None

    try:
        # Apply one migration step per loop iteration: version -> version+1.
        while version < DATABASE_VERSION:
            old_db_file = os.path.abspath(
                os.path.join(tmpdir, 'glbackend-%d.db' % version))
            new_db_file = os.path.abspath(
                os.path.join(tmpdir, 'glbackend-%d.db' % (version + 1)))

            GLSettings.db_file = new_db_file
            GLSettings.enable_input_length_checks = False

            to_delete_on_fail.append(new_db_file)
            to_delete_on_success.append(old_db_file)

            GLSettings.print_msg("Updating DB from version %d to version %d" %
                                 (version, version + 1))

            store_old = Store(create_database('sqlite:' + old_db_file))
            store_new = Store(create_database('sqlite:' + new_db_file))

            # Here is instanced the migration script
            MigrationModule = importlib.import_module(
                "globaleaks.db.migrations.update_%d" % (version + 1))
            migration_script = MigrationModule.MigrationScript(
                migration_mapping, version, store_old, store_new)

            GLSettings.print_msg("Migrating table:")

            try:
                try:
                    migration_script.prologue()
                except Exception as exception:
                    GLSettings.print_msg(
                        "Failure while executing migration prologue: %s" %
                        exception)
                    raise exception

                for model_name, _ in migration_mapping.iteritems():
                    # Only migrate tables that exist on both sides of the step.
                    if migration_script.model_from[
                            model_name] is not None and migration_script.model_to[
                                model_name] is not None:
                        try:
                            migration_script.migrate_model(model_name)

                            # Commit at every table migration in order to be able to detect
                            # the precise migration that may fail.
                            migration_script.commit()
                        except Exception as exception:
                            GLSettings.print_msg(
                                "Failure while migrating table %s: %s " %
                                (model_name, exception))
                            raise exception

                try:
                    migration_script.epilogue()
                    migration_script.commit()
                except Exception as exception:
                    GLSettings.print_msg(
                        "Failure while executing migration epilogue: %s " %
                        exception)
                    raise exception

            finally:
                # the database should be always closed before leaving the application
                # in order to not keep leaking journal files.
                migration_script.close()

            GLSettings.print_msg("Migration stats:")

            # we open a new db in order to verify integrity of the generated file
            store_verify = Store(
                create_database(GLSettings.make_db_uri(new_db_file)))

            for model_name, _ in migration_mapping.iteritems():
                if model_name == 'ApplicationData':
                    continue

                if migration_script.model_from[
                        model_name] is not None and migration_script.model_to[
                            model_name] is not None:
                    count = store_verify.find(
                        migration_script.model_to[model_name]).count()
                    if migration_script.entries_count[model_name] != count:
                        if migration_script.fail_on_count_mismatch[model_name]:
                            raise AssertionError("Integrity check failed on count equality for table %s: %d != %d" % \
                                (model_name, count, migration_script.entries_count[model_name]))
                        else:
                            GLSettings.print_msg(" * %s table migrated (entries count changed from %d to %d)" % \
                                (model_name, migration_script.entries_count[model_name], count))
                    else:
                        GLSettings.print_msg(" * %s table migrated (%d entry(s))" % \
                            (model_name, migration_script.entries_count[model_name]))

            version += 1

            store_verify.close()

        perform_data_update(new_db_file)

    except Exception as exception:
        # simply propagate the exception
        raise exception

    else:
        # in case of success first copy the new migrated db, then as last action delete the original db file
        shutil.copy(new_db_file, final_db_file)
        security.overwrite_and_remove(orig_db_file)

    finally:
        # Always cleanup the temporary directory used for the migration
        for f in os.listdir(tmpdir):
            tmp_db_file = os.path.join(tmpdir, f)
            security.overwrite_and_remove(tmp_db_file)

        shutil.rmtree(tmpdir)
def get_store(database_uri):
    """Return a new Store connected to *database_uri*."""
    return Store(create_database(database_uri))
def setUp(self):
    """Give each test a fresh in-memory SQLite store."""
    super(BlockAccessTest, self).setUp()
    self.store = Store(SQLite(URI("sqlite:")))
def get_store(self):
    """Return a new Store for this instance's database URI."""
    database = create_database(self.uri_string)
    return Store(database)
def get_store():
    """Return a new Store bound to the configured GlobaLeaks database URI."""
    database = create_database(GLSettings.db_uri)
    return Store(database)
def memory_store():
    """Return a schema-initialized Store backed by an in-memory SQLite DB."""
    store = Store(create_database("sqlite:"))
    setup_schema(store)
    return store