def test_check_unmodifiable_strings(self):
    """Verify that the data migration restores unmodifiable l10n strings."""
    self._initStartDB(34)

    notification_l10n = NotificationL10NFactory(self.store)
    original = notification_l10n.get_val('export_template', 'it')

    # Clobber the template, then confirm the write actually took effect.
    notification_l10n.set_val('export_template', 'it', '')
    self.assertEqual(notification_l10n.get_val('export_template', 'it'), '')
    self.store.commit()

    # place a dummy version in the current db
    store = Store(create_database(GLSettings.db_uri))
    prv = config.PrivateFactory(store)
    self.dummy_ver = '2.XX.XX'
    prv.set_val('version', self.dummy_ver)
    self.assertEqual(prv.get_val('version'), self.dummy_ver)
    store.commit()
    store.close()

    migration.perform_data_update(self.db_file)

    # After the update the unmodifiable string must be back to its original value.
    store = Store(create_database(GLSettings.db_uri))
    restored = NotificationL10NFactory(store).get_val('export_template', 'it')
    self.assertEqual(restored, original)
    store.commit()
    store.close()

    shutil.rmtree(GLSettings.db_path)
def __init__(self, table_history, old_db_file, new_db_file, start_ver):
    """
    Prepare a single migration step from start_ver to start_ver + 1.

    :param table_history: dict mapping table name -> per-version list of
        CREATE statements (one entry per supported DB version).
    :param old_db_file: path of the sqlite DB to migrate from.
    :param new_db_file: path of the sqlite DB to migrate into.
    :param start_ver: schema version of the old DB.
    :raises TypeError: if any table history does not cover every version
        from FIRST_DATABASE_VERSION_SUPPORTED through DATABASE_VERSION.
    :raises IOError: if the final-version schema file is unreadable.
    """
    self.table_history = table_history
    self.old_db_file = old_db_file
    self.new_db_file = new_db_file
    self.start_ver = start_ver

    self.std_fancy = " ł "
    self.debug_info = " [%d => %d] " % (start_ver, start_ver + 1)

    # Sanity check: every table must declare one status per supported version.
    for k, v in table_history.iteritems():
        length = DATABASE_VERSION + 1 - FIRST_DATABASE_VERSION_SUPPORTED
        if len(v) != length:
            msg = 'Expecting a table with {} statuses ({})'.format(length, k)
            raise TypeError(msg)

    log.msg('{} Opening old DB: {}'.format(self.debug_info, old_db_file))
    old_database = create_database('sqlite:' + self.old_db_file)
    self.store_old = Store(old_database)

    GLSettings.db_file = new_db_file

    new_database = create_database('sqlite:' + new_db_file)
    self.store_new = Store(new_database)

    # Final step: build the new DB straight from the canonical SQL schema
    # file instead of the per-version table history.
    if self.start_ver + 1 == DATABASE_VERSION:
        log.msg('{} Acquire SQL schema {}'.format(self.debug_info, GLSettings.db_schema_file))

        if not os.access(GLSettings.db_schema_file, os.R_OK):
            log.msg('Unable to access', GLSettings.db_schema_file)
            raise IOError('Unable to access db schema file')

        with open(GLSettings.db_schema_file) as f:
            create_queries = ''.join(f).split(';')
            for create_query in create_queries:
                try:
                    self.store_new.execute(create_query + ';')
                except OperationalError:
                    # NOTE(review): errors here are logged and swallowed —
                    # presumably some fragments of the split schema are
                    # expected to fail; confirm.
                    log.msg('OperationalError in "{}"'.format(create_query))

        self.store_new.commit()
        return  # return here and manage the migrant versions here:

    # Intermediate step: create only the tables defined for version start_ver + 1.
    for k, v in self.table_history.iteritems():
        create_query = self.get_right_sql_version(k, self.start_ver + 1)
        if not create_query:
            # table not present in the version
            continue

        try:
            self.store_new.execute(create_query + ';')
        except OperationalError as excep:
            log.msg('{} OperationalError in [{}]'.format(self.debug_info, create_query))
            raise excep

    self.store_new.commit()
def setUp(self):
    """Build an on-disk patch package plus two sqlite stores for the tests.

    Creates 'mypackage' with two patch modules (deliberately created in
    non-numeric order), a main store with a patch table, and a secondary
    store used only to observe the transaction manager's state.
    """
    super(PatchApplierTest, self).setUp()

    self.patchdir = self.makeDir()
    self.pkgdir = os.path.join(self.patchdir, "mypackage")
    os.makedirs(self.pkgdir)

    # 'with' guarantees the file is closed even if write() fails
    # (the original open/write/close leaked the handle on error).
    with open(os.path.join(self.pkgdir, "__init__.py"), "w") as f:
        f.write("shared_data = []")

    # Order of creation here is important to try to screw up the
    # patch ordering, as os.listdir returns in order of mtime (or
    # something).
    for pname, data in [("patch_380.py", patch_test_1),
                        ("patch_42.py", patch_test_0)]:
        self.add_module(pname, data)

    sys.path.append(self.patchdir)

    self.filename = self.makeFile()
    self.uri = "sqlite:///%s" % self.filename
    self.store = Store(create_database(self.uri))
    self.store.execute("CREATE TABLE patch "
                       "(version INTEGER NOT NULL PRIMARY KEY)")

    # Neither patch may be marked as applied yet.  ('(42)' was just the
    # int 42 wearing misleading parentheses — not a tuple.)
    self.assertFalse(self.store.get(Patch, 42))
    self.assertFalse(self.store.get(Patch, 380))

    import mypackage
    self.mypackage = mypackage
    self.patch_set = PatchSet(mypackage)

    # Create another connection just to keep track of the state of the
    # whole transaction manager. See the assertion functions below.
    self.another_store = Store(create_database("sqlite:"))
    self.another_store.execute("CREATE TABLE test (id INT)")
    self.another_store.commit()

    self.prepare_for_transaction_check()

    class Committer(object):
        # The first parameter is named 'committer' (not 'self') so the
        # closed-over outer 'self' of setUp stays visible in the bodies.
        def commit(committer):
            self.store.commit()
            self.another_store.commit()

        def rollback(committer):
            self.store.rollback()
            self.another_store.rollback()

    self.committer = Committer()
    self.patch_applier = PatchApplier(self.store, self.patch_set,
                                      self.committer)
def test_detect_and_fix_cfg_change(self):
    """An invalid config is detected and repaired by the data update."""
    store = Store(create_database(GLSettings.db_uri))
    self.assertFalse(config.is_cfg_valid(store))
    store.close()

    migration.perform_data_update(self.db_file)

    store = Store(create_database(GLSettings.db_uri))
    prv = config.PrivateFactory(store)
    self.assertEqual(prv.get_val(u'version'), __version__)
    self.assertEqual(prv.get_val(u'xx_smtp_password'), self.dp)
    self.assertTrue(config.is_cfg_valid(store))
    store.close()
def setUp(self):
    """Copy a populated fixture DB into a scratch dir and plant a fake version."""
    helpers.init_glsettings_for_unit_tests()

    GLSettings.db_path = os.path.join(GLSettings.ramdisk_path, 'db_test')
    shutil.rmtree(GLSettings.db_path, True)
    os.mkdir(GLSettings.db_path)

    db_name = 'glbackend-%d.db' % DATABASE_VERSION
    fixture_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                'db', 'populated', db_name)
    self.db_file = os.path.join(GLSettings.db_path, db_name)
    shutil.copyfile(fixture_path, self.db_file)
    GLSettings.db_uri = GLSettings.make_db_uri(self.db_file)

    # place a dummy version in the current db
    store = Store(create_database(GLSettings.db_uri))
    self.dummy_ver = '2.XX.XX'
    prv = config.PrivateFactory(store)
    prv.set_val(u'version', self.dummy_ver)
    self.assertEqual(prv.get_val(u'version'), self.dummy_ver)
    store.commit()
    store.close()

    # backup various mocks that we will use
    self._bck_f = config.is_cfg_valid
    GLConfig['private']['xx_smtp_password'] = \
        GLConfig['private'].pop('smtp_password')
    self.dp = u'yes_you_really_should_change_me'
def on_abm_productos_clicked(self):
    """Qt slot: open the 'ABM productos' window backed by the sqlite store."""
    DATABASE = create_database('sqlite:abm.db')
    # NOTE(review): 'almacen' is created but never used below — confirm whether
    # AddProductos was meant to receive this Store.
    almacen = Store(DATABASE)
    # NOTE(review): 'productos' is not defined in this scope — this looks like
    # a NameError waiting to happen; verify the intended argument.
    cm = ClienteManager(productos, reset=False)
    self.productos = AddProductos(manager=cm, managers=[])
    #self.cliente.setWindowIcon( self.btLibroDiario.icon() )
    self.productos.show()
def openDB(self):
    """Open (and, for sqlite, create if missing) the configured database.

    Sets self._database and self._store and records success in self.dbOK.
    Any exception is logged and reflected as self.dbOK = False.
    """
    try:
        self._config = ConfigManager()
        self.db = self._config.config[self._config.database]["database"]
        create_db = False
        if self.db == self._config.Sqlite:
            folder = self._config.config[self._config.database]["folder"]
            # os.path.join is portable; the old "+ '/'" concat assumed POSIX.
            loc = os.path.join(folder, 'icepapcms.db')
            print("Using Sqlite database at %s" % loc)
            create_db = not os.path.exists(loc)
            if create_db:
                print("No database file found, creating it")
                if not os.path.exists(folder):
                    os.mkdir(folder)
            self._database = create_database("%s:%s" % (self.db, loc))
        else:
            server = self._config.config[self._config.database]["server"]
            user = self._config.config[self._config.database]["user"]
            pwd = self._config.config[self._config.database]["password"]
            scheme = "{}://{}:{}@{}/icepapcms".format(
                self.db, user, pwd, server)
            if self.db == 'mysql':
                self._database = MySQL(scheme)
            else:
                self._database = create_database(scheme)
        self._store = Store(self._database)
        if create_db:
            self.dbOK = self.createSqliteDB()
        else:
            self.dbOK = True
    except Exception as e:
        self.log.error("Unexpected error on openDB: %s", e)
        self.dbOK = False
def postconditions_34(self):
    """After migration 34 the unmodifiable template must no longer be bogus."""
    store = Store(create_database(GLSettings.db_uri))
    template = NotificationL10NFactory(store).get_val(u'export_template', u'it')
    self.assertNotEqual(template, 'unmodifiable')
    store.commit()
    store.close()
def insert_data(self):
    """
    Return the SQL syntax needed to insert the data already present
    in the table.
    """
    store = Store(create_database(config.Database().uri))

    attr_names = [
        col._detect_attr_name(self.model.__class__)
        for col in self.model._storm_columns.keys()
    ]

    # MySQL quotes identifiers with backticks, everything else with quotes.
    quote = '`' if self.__class__.__name__ == 'MySQL' else "'"

    def _sql_value(value):
        # unicode values are quoted; any other type is rendered via str().
        return "'{}'".format(value) if type(value) is unicode else str(value)

    query = ''
    for row in store.find(self.model.__class__):
        record = {name: getattr(row, name) for name in attr_names}
        query += 'INSERT INTO {}{}{} ({}) VALUES ({});\n'.format(
            quote, self.model.__storm_table__, quote,
            ', '.join(record.keys()),
            ', '.join(_sql_value(v) for v in record.values()))

    return query
def test_mig_37_valid_tor_hs_key(self):
    """Migration 37 imports an existing Tor HS key pair into the config."""
    self._initStartDB(36)

    from globaleaks.db.migrations import update_37
    saved_tor_dir = update_37.TOR_DIR
    update_37.TOR_DIR = GLSettings.db_path

    # Drop a valid key/hostname pair where the migration will look for them.
    shutil.copy(os.path.join(helpers.DATA_DIR, 'tor/private_key'),
                os.path.join(update_37.TOR_DIR, 'private_key'))
    shutil.copy(os.path.join(helpers.DATA_DIR, 'tor/hostname'),
                os.path.join(update_37.TOR_DIR, 'hostname'))

    self.assertEqual(update_db(), None)

    new_uri = GLSettings.make_db_uri(os.path.join(GLSettings.db_path,
                                                  GLSettings.db_file_name))
    store = Store(create_database(new_uri))
    self.assertEqual('lftx7dbyvlc5txtl.onion',
                     config.NodeFactory(store).get_val('onionservice'))

    with open(os.path.join(helpers.DATA_DIR,
                           'tor/ephemeral_service_key')) as f:
        saved_key = f.read().strip()
    self.assertEqual(saved_key,
                     config.PrivateFactory(store).get_val('tor_onion_key'))

    store.close()
    shutil.rmtree(GLSettings.db_path)
    update_37.TOR_DIR = saved_tor_dir
def getStore():
    """Return a Store bound to the application database."""
    return Store(getDB())
def test_make_commits_transaction_once(self):
    """
    L{ZStormResourceManager.make} commits schema changes only once across
    all stores, after all patch and delete statements have been executed.
    """
    database2 = {"name": "test2",
                 "uri": "sqlite:///%s" % self.makeFile(),
                 "schema": self.databases[0]["schema"]}
    self.databases.append(database2)
    other_store = Store(create_database(database2["uri"]))

    # Give both stores a patch table plus one row of data to be cleaned up.
    for store in (self.store, other_store):
        store.execute("CREATE TABLE patch "
                      "(version INTEGER NOT NULL PRIMARY KEY)")
        store.execute("CREATE TABLE test (foo TEXT)")
        store.execute("INSERT INTO test (foo) VALUES ('data')")
        store.commit()

    with CaptureTracer() as tracer:
        zstorm = self.resource.make([])

    # Exactly one COMMIT per store, both at the very end of the trace.
    self.assertEqual(["COMMIT", "COMMIT"], tracer.queries[-2:])

    for name in ("test", "test2"):
        rows = zstorm.get(name).execute("SELECT foo FROM test")
        self.assertEqual([], list(rows))
def setup_request():
    """Flask before-request hook: load the YAML config and open a DB store."""
    # Read the configuration.  The with-block closes the file handle
    # (the original left the stream open).
    with open('%s/config.yaml' % request.environ['UNISON_ROOT']) as stream:
        # safe_load avoids arbitrary object construction from YAML tags;
        # a config file should only contain plain scalars and mappings.
        g.config = yaml.safe_load(stream)

    # Set up the database.
    database = create_database(g.config['database']['string'])
    g.store = Store(database)
def test_version_change_success(self):
    """After a successful update the stored version equals __version__."""
    migration.perform_data_update(self.db_file)

    store = Store(create_database(GLSettings.db_uri))
    self.assertEqual(config.PrivateFactory(store).get_val(u'version'),
                     __version__)
    store.close()
def preconditions_34(self):
    """Seed the pre-migration DB: force an unmodifiable l10n string to a bogus value."""
    store = Store(create_database(self.start_db_uri))
    notification_l10n = NotificationL10NFactory(store)
    notification_l10n.set_val(u'export_template', u'it', 'unmodifiable')
    x = notification_l10n.get_val(u'export_template', u'it')
    # BUG FIX: assertTrue(x, 'unmodifiable') treated the string as a failure
    # message and only checked truthiness; assertEqual performs the intended
    # comparison that the write actually took effect.
    self.assertEqual(x, 'unmodifiable')
    store.commit()
    store.close()
def _checkValidDatabase(self, storage):
    '''Checks the Store to make sure it has a valid database'''
    store = Store(storage)
    # Every table declared in SCHEMA must exist and start out empty.
    for table in SCHEMA.iterkeys():
        rows = store.execute('SELECT * FROM `%s`' % table.lower())
        self.assertEqual(rows.get_all(), [])
    return True
def test_migration_error_with_removed_language(self):
    """A language unknown to the codebase must make the data update raise."""
    store = Store(create_database(GLSettings.db_uri))
    store.add(EnabledLanguage('zyx'))
    store.commit()
    store.close()

    self.assertRaises(Exception, migration.perform_data_update, self.db_file)
def connection():
    """Returns database connection."""
    try:
        # Attribute access on flask.g can raise AttributeError (or KeyError
        # on some Flask versions) when no connection exists yet; a
        # present-but-falsy db is also treated as missing and recreated.
        if not flask.g.db:
            raise AttributeError()
    except (KeyError, AttributeError):
        flask.g.db = Store(
            create_database(flask.current_app.config['STORM_DATABASE_URI']))
    return flask.g.db
def connect(self):
    """Open the outbound MySQL connection described by the Config object."""
    opts = Config()
    dsn = 'mysql://%s:%s@%s:%s/%s' % (opts.db_user_out,
                                      opts.db_password_out,
                                      opts.db_hostname_out,
                                      opts.db_port_out,
                                      opts.db_database_out)
    self.database = create_database(dsn)
    self.store = Store(self.database)
def perform_data_update(dbfile):
    """Run db_perform_data_update on dbfile inside a commit/rollback guard.

    Commits on success, rolls back and re-raises on any failure, and always
    closes the temporary store.
    """
    store = Store(create_database(GLSettings.make_db_uri(dbfile)))
    try:
        db_perform_data_update(store)
        store.commit()
    except:
        store.rollback()
        raise
    finally:
        store.close()
def setUp(self, dsn=conf.LITE_DSN):
    """should load the dataset"""
    # Local imports keep storm out of module import time.
    from storm.locals import create_database, Store
    from storm.uri import URI
    from storm.tracer import debug
    # debug(1)  # uncomment for SQL tracing

    self.store = Store(create_database(URI(dsn)))
    self.fixture.store = self.store
    setup_db(self.store)
def test_ver_change_exception(self):
    """A failure inside the managed update must leave the version untouched."""
    # Explicity throw an exception in managed_ver_update via is_cfg_valid
    config.is_cfg_valid = apply_gen(throw_excep)
    self.assertRaises(IOError, migration.perform_data_update, self.db_file)

    store = Store(create_database(GLSettings.db_uri))
    self.assertEqual(config.PrivateFactory(store).get_val(u'version'),
                     self.dummy_ver)
    store.close()
def do_statspollute(dbfile):
    """Replace the Stats table of dbfile with three weeks of synthetic activity."""
    # source
    gl_database = create_database("sqlite:%s" % dbfile)
    source_store = Store(gl_database)

    # Remove every existing stats row before repopulating.
    stats = source_store.find(models.Stats)

    counter = 0
    for s in stats:
        source_store.remove(s)
        counter += 1

    print "removed %d entry in stats" % counter

    counter = 0
    # 21 days in the past
    for past_hours in xrange(24 * 7 * 3):
        past_hours += 4  # shift each sample 4 hours further into the past
        when = utc_past_date(hours=past_hours)

        newstat = models.Stats()
        newstat.freemb = randint(1000, 1050)
        newstat.year = when.isocalendar()[0]
        newstat.week = when.isocalendar()[1]

        # Random skew applied to all counters; roughly in [-2.0, +2.2).
        level = round((randint(0, 1000) / 240.0), 1) - 2

        def random_pollution():
            return int(randint(0, 11) + (5 * level))

        activity_fake = {
            'successfull_logins': random_pollution(),
            'failed_logins': random_pollution(),
            'started_submissions': random_pollution(),
            'completed_submissions': random_pollution(),
            'uploaded_files': int(randint(0, 11) + (5 * level)),
            'appended_files': random_pollution(),
            'wb_comments': random_pollution(),
            'wb_messages': random_pollution(),
            'receiver_comments': random_pollution(),
            'receiver_messages': random_pollution()
        }

        # Negative counters make no sense: clamp them to zero.
        for k, v in activity_fake.iteritems():
            if v < 0:
                activity_fake[k] = 0

        newstat.start = when
        newstat.summary = activity_fake
        counter += 1
        source_store.add(newstat)

    print "Committing %d stats" % counter
    source_store.commit()
def db():
    """Return the Store for the current thread, creating it on first use."""
    tid = threading.current_thread()
    if tid not in _dbpool:
        _dbpool[tid] = Store(create_database("sqlite:graph.db"))
    return _dbpool[tid]
def test_create(self):
    """
    L{Schema.create} can be used to create the tables of a L{Store}.
    """
    # The table must not exist before create() runs.
    self.assertRaises(StormError, self.store.execute,
                      "SELECT * FROM person")

    self.schema.create(self.store)
    self.assertEquals(list(self.store.execute("SELECT * FROM person")), [])

    # By default changes are committed
    second_store = Store(self.database)
    self.assertEquals(list(second_store.execute("SELECT * FROM person")), [])
def connect(connection_string):
    '''Connects to a given database and prepares its structure.

    Binds the shared ActiveRecord.store and executes each registered
    subclass's table_schema (a single SQL string, or an iterable of
    statements).
    '''
    database = create_database(connection_string)
    ActiveRecord.store = Store(database)
    # Loop variable renamed: the original shadowed the builtin 'type'.
    for record_class in ActiveRecord.subclasses:
        schema = record_class.table_schema
        if isinstance(schema, (str, unicode)):
            ActiveRecord.store.execute(schema)
        else:
            for statement in schema:
                ActiveRecord.store.execute(statement)
def set_config(self, **kwargs):
    """Set the configuration of this back-end.

    Expects 'uri' (database DSN), 'log_format' and 'log_level' (a logging
    level name such as 'INFO') in kwargs.
    """
    uri = kwargs['uri']
    database = create_database(uri)
    self.store = Store(database)

    self.logger = logging.getLogger('StormStorageBackend')
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(kwargs['log_format']))
    self.logger.addHandler(handler)
    # getattr is the idiomatic way to resolve a level name on the module;
    # logging.__getattribute__(...) only worked by accident of internals.
    self.logger.setLevel(getattr(logging, kwargs['log_level']))
def test_version_change_not_ok(self):
    """A failed update must roll back and leave the dummy version in place."""
    # Set is_config_valid to false during managed ver update
    config.is_cfg_valid = apply_gen(mod_bool)
    self.assertRaises(Exception, migration.perform_data_update, self.db_file)

    # Ensure the rollback has succeeded
    store = Store(create_database(GLSettings.db_uri))
    self.assertEqual(config.PrivateFactory(store).get_val(u'version'),
                     self.dummy_ver)
    store.close()
def postconditions_36(self):
    """After migration 36 the Tor HS hostname and key must be in the config."""
    new_uri = GLSettings.make_db_uri(os.path.join(GLSettings.db_path,
                                                  GLSettings.db_file_name))
    store = Store(create_database(new_uri))

    self.assertEqual('lftx7dbyvlc5txtl.onion',
                     config.NodeFactory(store).get_val(u'onionservice'))

    key_path = os.path.join(helpers.DATA_DIR, 'tor/ephemeral_service_key')
    with open(key_path) as f:
        saved_key = f.read().strip()
    self.assertEqual(saved_key,
                     config.PrivateFactory(store).get_val(u'tor_onion_key'))

    store.close()
def test_trim_value_to_range(self):
    """trim_value_to_range must clamp an out-of-range config value to its max."""
    store = Store(create_database(GLSettings.db_uri))
    nf = config.NodeFactory(store)

    fake_cfg = nf.get_cfg(u'wbtip_timetolive')
    # set_v enforces the validator, so an out-of-range write must raise ...
    self.assertRaises(errors.InvalidModelInput, fake_cfg.set_v, 3650)
    # ... while a direct assignment bypasses validation and corrupts the value.
    fake_cfg.value = {'v': 3650}
    store.commit()

    MigrationBase.trim_value_to_range(nf, u'wbtip_timetolive')
    self.assertEqual(nf.get_val(u'wbtip_timetolive'), 365 * 2)
    # Consistency fix: every sibling test closes its store; this one leaked it.
    store.close()