def test_store_from_config_should_not_cache_when_use_cache_is_False(self):
    """A fresh store must be built when use_cache is disabled."""
    config_file = tempfile.NamedTemporaryFile()
    config_file.write("{}\n")
    config_file.flush()
    cached = M.store_from_config(config_file.name)
    uncached = M.store_from_config(config_file.name, use_cache=False)
    # Identity check: use_cache=False must bypass the store cache.
    ok_(cached is not uncached, 'store1 and store2 are same')
def test_store_from_config_should_cache_store_objects(self):
    """Two lookups of the same config file must hit the store cache."""
    conf = tempfile.NamedTemporaryFile()
    conf.write("{}\n")
    conf.flush()
    first = M.store_from_config(conf.name)
    second = M.store_from_config(conf.name)
    # Same file name, default caching: exactly one store object.
    ok_(first is second, 'store1 and store2 are not same')
def test_store_from_config_should_cache_store_objects(self):
    """Repeated store_from_config on one config file returns the cached object."""
    tmp = tempfile.NamedTemporaryFile()
    tmp.write("{}\n")
    tmp.flush()
    store_a = M.store_from_config(tmp.name)
    store_b = M.store_from_config(tmp.name)
    ok_(store_a is store_b, 'store1 and store2 are not same')
def test_store_from_config_should_not_cache_when_use_cache_is_False(self):
    """use_cache=False forces construction of a brand-new store."""
    tmp = tempfile.NamedTemporaryFile()
    tmp.write("{}\n")
    tmp.flush()
    store_a = M.store_from_config(tmp.name)
    store_b = M.store_from_config(tmp.name, use_cache=False)
    ok_(store_a is not store_b, 'store1 and store2 are same')
def prepare_store(self, use_cache=False, created_via='test_sqlstore', **kwargs):
    """Build a store from this test's database config.

    Defaults to an uncached store tagged as created via the test suite;
    extra keyword arguments pass straight through to store_from_config.
    """
    return M.store_from_config(
        self.database,
        use_cache=use_cache,
        created_via=created_via,
        **kwargs
    )
def test_push_config(self):
    """Pushing a new config rebuilds changed farms, reuses unchanged ones,
    and creates newly added ones."""
    def dbconf(db):
        # All farms share connection settings; only the db name varies.
        return {
            'host': '127.0.0.1',
            'port': 3306,
            'db': db,
            'user': '******',
            'passwd': 'sqlstore',
        }

    store = M.store_from_config(self.database)
    farm1 = store.get_farm('farm1')
    eq_(farm1.dbcnf, dbconf('test_sqlstore1'))
    farm2 = store.get_farm('farm2')
    eq_(farm2.dbcnf, dbconf('test_sqlstore2'))
    # push new config
    store.receive_conf(str(self.database_new_config))
    # farm1 should change
    _farm1 = store.get_farm('farm1')
    eq_(_farm1.dbcnf, dbconf('test_sqlstore3'))
    ok_(_farm1 is not farm1)
    # farm2 should be re-used
    _farm2 = store.get_farm('farm2')
    eq_(_farm2.dbcnf, dbconf('test_sqlstore2'))
    ok_(_farm2 is farm2)
    # farm3 should be created
    farm3 = store.get_farm('farm3')
    eq_(farm3.dbcnf, dbconf('test_sqlstore3'))
def test_push_config(self):
    """Pushing a new config via receive_conf should rebuild only the
    farms whose settings changed and create newly added farms."""
    # Expected per-farm connection settings before the push.
    farm1_dbconf = {
        'host': '127.0.0.1',
        'port': 3306,
        'db': 'test_sqlstore1',
        'user': '******',
        'passwd': 'sqlstore',
    }
    # farm1's settings after the push (now points at test_sqlstore3).
    farm1_new_dbconf = {
        'host': '127.0.0.1',
        'port': 3306,
        'db': 'test_sqlstore3',
        'user': '******',
        'passwd': 'sqlstore',
    }
    farm2_dbconf = {
        'host': '127.0.0.1',
        'port': 3306,
        'db': 'test_sqlstore2',
        'user': '******',
        'passwd': 'sqlstore',
    }
    farm3_dbconf = {
        'host': '127.0.0.1',
        'port': 3306,
        'db': 'test_sqlstore3',
        'user': '******',
        'passwd': 'sqlstore',
    }
    store = M.store_from_config(self.database)
    farm1 = store.get_farm('farm1')
    eq_(farm1.dbcnf, farm1_dbconf)
    farm2 = store.get_farm('farm2')
    eq_(farm2.dbcnf, farm2_dbconf)
    # push new config
    store.receive_conf(str(self.database_new_config))
    # farm1 should change
    _farm1 = store.get_farm('farm1')
    eq_(_farm1.dbcnf, farm1_new_dbconf)
    ok_(_farm1 is not farm1)
    # farm2 should be re-used
    _farm2 = store.get_farm('farm2')
    eq_(_farm2.dbcnf, farm2_dbconf)
    ok_(_farm2 is farm2)
    # farm3 should be created
    farm3 = store.get_farm('farm3')
    eq_(farm3.dbcnf, farm3_dbconf)
def test_pickle(self):
    """A pickled store re-resolves its config name on unpickling,
    while already-live stores keep the config they were built with."""
    import pickle
    store = M.store_from_config('test-online', use_cache=False)
    buf = pickle.dumps(store)
    clone = pickle.loads(buf)
    # The round-tripped store carries the same config and farm set.
    eq_(store.db_config, clone.db_config)
    eq_(store.db_config_name, clone.db_config_name)
    eq_(len(store.farms), len(clone.farms))
    # Swap the named config; the live store keeps its old binding...
    M.replace_sqlstore_config('test-online', 'test-offline')
    eq_(store.db_config_name, 'test-online')
    # ...but a fresh unpickle of the old buffer resolves the name anew.
    clone2 = pickle.loads(buf)
    eq_(clone2.db_config_name, 'test-offline')
    eq_(store.get_farm('farm1').dbcnf['db'], 'test_sqlstore1')
def test_pickle(self):
    """Stores pickle by config name: unpickling re-resolves the name,
    while an already-live store keeps the config it was built with."""
    import pickle
    store = M.store_from_config('test-online', use_cache=False)
    buf = pickle.dumps(store)
    _store = pickle.loads(buf)
    # Round-trip preserves config, config name and the farm set.
    eq_(store.db_config, _store.db_config)
    eq_(store.db_config_name, _store.db_config_name)
    eq_(len(store.farms), len(_store.farms))
    M.replace_sqlstore_config('test-online', 'test-offline')
    # The existing store is unaffected by the replacement...
    eq_(store.db_config_name, 'test-online')
    # ...but unpickling the old buffer picks up the new mapping.
    _store_2 = pickle.loads(buf)
    eq_(_store_2.db_config_name, 'test-offline')
    eq_(store.get_farm('farm1').dbcnf['db'], 'test_sqlstore1')
def test_store_from_config_should_accept_dict_as_parameter(self):
    """store_from_config accepts an inline dict config (not just a
    config-file name); merely constructing it must not raise."""
    M.store_from_config({})
def test_replace(self):
    """replace_sqlstore_config redirects one config name to another."""
    M.replace_sqlstore_config('test-online', 'test-offline')
    replaced = M.store_from_config('test-online', use_cache=False)
    # A store built under the old name now resolves to the offline config.
    eq_(replaced.db_config_name, 'test-offline')
    eq_(replaced.get_farm('farm1').dbcnf['db'], 'test_sqlstore2')
DATABASE = { 'farms': { "luz_farm": { "master": "localhost:test_vagrant9010:eye:sauron", "tables": ["*"], }, }, 'options': { 'show_warnings': True, } } from unittest import TestCase store = store_from_config(DATABASE) mc.clear() cursor = store.get_cursor() cursor.delete_without_where = True cursor.execute('''DROP TABLE IF EXISTS `test_t`''') cursor.execute(''' CREATE TABLE `test_t` ( `id` int(10) unsigned NOT NULL AUTO_INCREMENT, `subject_id` int(10) unsigned NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `uk_subject` (`subject_id`)) ENGINE=MEMORY AUTO_INCREMENT=1''' ) cursor.execute('''DROP TABLE IF EXISTS `test_a`''') cursor.execute(''' CREATE TABLE `test_a`
def get_mc():
    # Always build a fresh memcached client (bypass the module-level cache).
    return mc_from_config(MEMCACHED, use_cache=False)


def stub_cache(*args, **kws):
    # No-op stand-in used to neutralize the caching decorators in tests.
    pass


mc = get_mc()
# Disable all caching decorators by pointing them at the no-op stub...
pcache = pcache2 = listcache = cache_in_obj = delete_cache = cache = stub_cache
# ...then install the mc-bound decorators into this module's namespace.
globals().update(create_decorators(mc))


def mc_gets(mc_key, getter, ids):
    '''Batched get helper: read mc_key % id for every id from memcached,
    falling back to getter(id) for any id that missed.'''
    results = mc.get_multi([mc_key % i for i in ids])
    return [results.get(mc_key % i) or getter(i) for i in ids]


# mysql
def connect_mysql():
    # Fresh MySQL connection with unicode result decoding enabled.
    return connect(use_unicode=True)


def make_dict(cursor, row):
    # Map a result row to {column_name: value} using cursor metadata.
    return dict(zip((str(d[0]) for d in cursor.description), row))


store = store_from_config(MYSQL_STORE, use_cache=False)
setup(store, mc)
def prepare_store(self, use_cache=False, created_via='test_sqlstore', **kwargs):
    # Convenience wrapper: build a store from this test's database config,
    # uncached by default; extra kwargs pass through to store_from_config.
    return M.store_from_config(self.database, use_cache=use_cache, created_via=created_via, **kwargs)
def test_store_from_config_with_dict_should_cache_store_objects(self):
    """Identical dict configs resolve to a single cached store instance."""
    first = M.store_from_config(self.database)
    second = M.store_from_config(self.database)
    ok_(first is second, 'store1 and store2 are not same')
def test_store_from_config_with_dict_should_not_cache(self):
    """With use_cache=False a dict config yields a brand-new store."""
    cached = M.store_from_config(self.database)
    uncached = M.store_from_config(self.database, use_cache=False)
    ok_(cached is not uncached, 'store1 and store2 are same')
def test_store_from_config_with_dict_should_cache_store_objects(self):
    """Requesting a store twice for the same dict config hits the cache."""
    store_a = M.store_from_config(self.database)
    store_b = M.store_from_config(self.database)
    # Default caching: both names must reference one object.
    ok_(store_a is store_b, 'store1 and store2 are not same')
def test_store_from_config_should_accept_dict_as_parameter(self):
    """An empty dict is a valid config argument; construction must not raise."""
    M.store_from_config({})
def test_store_from_config_with_dict_should_not_cache(self):
    """use_cache=False bypasses the cache even for dict configs."""
    store_a = M.store_from_config(self.database)
    store_b = M.store_from_config(self.database, use_cache=False)
    ok_(store_a is not store_b, 'store1 and store2 are same')
def main(): parser = argparse.ArgumentParser() parser.add_argument('-c', '--config', help='sqlstore config') parser.add_argument('--without-drop-table', action='store_true') parser.add_argument('--verbose', action='store_true') parser.add_argument('--keep-auto-increment', action='store_true') parser.add_argument( '--only-meaningful-changes', action='store_true', help='Do not treat as change if only AUTO_INCREMENT changes') args = parser.parse_args() if not args.config: print 'sqlstore config must be specified' return 1 re_auto_increment = re.compile('\s+AUTO_INCREMENT=\d+') schema_cache = {} if args.only_meaningful_changes: try: schema_cache = pickle.load(open(SCHEMA_CACHE)) except: pass store = store_from_config(args.config) for name, farm in store.farms.items(): output_file = 'database-{}.sql'.format(name) tmp_output_file = '{}-tmp'.format(output_file) if args.verbose: print 'Dump schema in {} to {}...'.format(name, output_file) cursor = farm.get_cursor() cursor.execute('show tables') tables = sorted([r[0] for r in cursor.fetchall()]) fail = False with open(tmp_output_file, 'w') as f: f.write( '/*!40101 SET @saved_cs_client = @@character_set_client */;\n') f.write('/*!40101 SET character_set_client = utf8 */;\n\n') for table in tables: try: if not args.without_drop_table: f.write('DROP TABLE IF EXISTS `{}`;\n'.format(table)) cursor.execute('show create table `{}`'.format(table)) schema = cursor.fetchone()[-1] if not args.keep_auto_increment: schema = re_auto_increment.sub('', schema) elif args.only_meaningful_changes: _table = '{}.{}'.format(name, table) _schema = schema_cache.get(_table) if _schema and (re_auto_increment.sub('', _schema) == \ re_auto_increment.sub('', schema)): # only AUTO_INCREMENT changes, definition does not # change, use cached schema to keep AUTO_INCREMENT schema = _schema else: schema_cache[_table] = schema f.write('{};\n\n'.format(schema)) except Exception, exc: fail = True msg = 'dump schema of "{}.{}" fail: {}'.format( name, table, exc) 
print >> sys.stderr, msg break f.write( '/*!40101 SET character_set_client = @saved_cs_client */;\n') if not fail: os.rename(tmp_output_file, output_file) else: try: os.remove(tmp_output_file) except Exception, exc: print >> sys.stderr, 'remove tmp file "{}" fail: {}'.format( tmp_output_file, exc)
def mc_gets(mc_key, getter, ids):
    '''Batched get helper: read mc_key % id for every id from memcached,
    falling back to getter(id) for any id that missed.'''
    results = mc.get_multi([mc_key % i for i in ids])
    return [results.get(mc_key % i) or getter(i) for i in ids]


# mysql
def connect_mysql():
    # Fresh MySQL connection with unicode result decoding enabled.
    return connect(use_unicode=True)


def make_dict(cursor, row):
    # Map a result row to {column_name: value} using cursor metadata.
    return dict(zip((str(d[0]) for d in cursor.description), row))


def reset_mc():
    # Test stub: nothing to reset for memcached here.
    pass


def reset_beansdb():
    # Test stub: nothing to reset for beansdb here.
    pass


def clear_local_cache():
    # Reset both cache backends between tests.
    reset_mc()
    reset_beansdb()


store = store_from_config(MYSQL_STORE, use_cache=False)
setup(store, mc)
def test_normal_load_configfile(self):
    """Loading a named config resolves both its name and farm databases."""
    online = M.store_from_config('test-online', use_cache=False)
    eq_(online.db_config_name, 'test-online')
    eq_(online.get_farm('farm1').dbcnf['db'], 'test_sqlstore1')
def main(): parser = argparse.ArgumentParser() parser.add_argument('-c', '--config', help='sqlstore config') parser.add_argument('--without-drop-table', action='store_true') parser.add_argument('--verbose', action='store_true') parser.add_argument('--keep-auto-increment', action='store_true') parser.add_argument('--only-meaningful-changes', action='store_true', help='Do not treat as change if only AUTO_INCREMENT changes') args = parser.parse_args() if not args.config: print 'sqlstore config must be specified' return 1 re_auto_increment = re.compile('\s+AUTO_INCREMENT=\d+') schema_cache = {} if args.only_meaningful_changes: try: schema_cache = pickle.load(open(SCHEMA_CACHE)) except: pass store = store_from_config(args.config) for name, farm in store.farms.items(): output_file = 'database-{}.sql'.format(name) tmp_output_file = '{}-tmp'.format(output_file) if args.verbose: print 'Dump schema in {} to {}...'.format(name, output_file) cursor = farm.get_cursor() cursor.execute('show tables') tables = sorted([r[0] for r in cursor.fetchall()]) fail = False with open(tmp_output_file, 'w') as f: f.write('/*!40101 SET @saved_cs_client = @@character_set_client */;\n') f.write('/*!40101 SET character_set_client = utf8 */;\n\n') for table in tables: try: if not args.without_drop_table: f.write('DROP TABLE IF EXISTS `{}`;\n'.format(table)) cursor.execute('show create table `{}`'.format(table)) schema = cursor.fetchone()[-1] if not args.keep_auto_increment: schema = re_auto_increment.sub('', schema) elif args.only_meaningful_changes: _table = '{}.{}'.format(name, table) _schema = schema_cache.get(_table) if _schema and (re_auto_increment.sub('', _schema) == \ re_auto_increment.sub('', schema)): # only AUTO_INCREMENT changes, definition does not # change, use cached schema to keep AUTO_INCREMENT schema = _schema else: schema_cache[_table] = schema f.write('{};\n\n'.format(schema)) except Exception, exc: fail = True msg = 'dump schema of "{}.{}" fail: {}'.format(name, table, exc) 
print >>sys.stderr, msg break f.write('/*!40101 SET character_set_client = @saved_cs_client */;\n') if not fail: os.rename(tmp_output_file, output_file) else: try: os.remove(tmp_output_file) except Exception, exc: print >>sys.stderr, 'remove tmp file "{}" fail: {}'.format(tmp_output_file, exc)
def test_replace(self):
    """After replace_sqlstore_config, the old name yields the new config."""
    M.replace_sqlstore_config('test-online', 'test-offline')
    store = M.store_from_config('test-online', use_cache=False)
    eq_(store.db_config_name, 'test-offline')
    eq_(store.get_farm('farm1').dbcnf['db'], 'test_sqlstore2')
DATABASE = { 'farms': { "luz_farm": { "master": "localhost:test_vagrant9010:eye:sauron", "tables": ["*"], }, }, 'options': { 'show_warnings': True, } } from unittest import TestCase store = store_from_config(DATABASE) mc.clear() cursor = store.get_cursor() cursor.delete_without_where = True cursor.execute('''DROP TABLE IF EXISTS `test_t`''') cursor.execute(''' CREATE TABLE `test_t` ( `id` int(10) unsigned NOT NULL AUTO_INCREMENT, `subject_id` int(10) unsigned NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `uk_subject` (`subject_id`)) ENGINE=MEMORY AUTO_INCREMENT=1''') cursor.execute('''DROP TABLE IF EXISTS `test_a`''') cursor.execute(''' CREATE TABLE `test_a` ( `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
def test_normal_load_configfile(self):
    """A named config file loads with its own name and expected farm dbs."""
    store = M.store_from_config('test-online', use_cache=False)
    # Name is taken verbatim; farm1 points at the online test database.
    eq_(store.db_config_name, 'test-online')
    eq_(store.get_farm('farm1').dbcnf['db'], 'test_sqlstore1')