def __init__(self, testcase, bucket):
    """Bind the test context and open a memcached connection to the master node.

    Also prepares the two JSON document templates used by the data
    generators (a plain one and one with deliberately long key names).
    """
    self.testcase = testcase
    self.bucket = bucket
    self.input = TestInputSingleton.input
    self.servers = self.input.servers
    self.master = self.servers[0]
    self.rest = RestConnection(self.master)
    self.log = logger.Logger.get_logger()
    self.client = MemcachedClient(host=self.master.ip)
    # Template document shape used by the JSON generators.
    self.jsonSchema = {
        "id": "0",
        "number": 0,
        "array": [],
        "child": {},
        "isDict": True,
        "padding": None,
    }
    # Same shape, but with long field names to exercise long sub-doc paths.
    self.jsonSchema_longPath = {
        "id": "0",
        "number": 0,
        "array12345678901234567890123456789": [],
        "child12345678901234567890123456789": {},
        "isDict": True,
        "padding": None,
    }
def __init__(self, serverip="localhost", port=11211, bucket="default", password=""):
    """Connect to a memcached endpoint and SASL-authenticate against *bucket*."""
    self.client = MemcachedClient(serverip, port)
    self.client.sasl_auth_plain(bucket, password)
def setUp(self):
    """Run the base-class setup, then attach a memcached client and a JSON template."""
    super(SubdocSinglePathTests, self).setUp()
    self.client = MemcachedClient(host=self.server.ip)
    # Template document shape used by the JSON generators.
    self.jsonSchema = {
        "id": "0",
        "number": 0,
        "array": [],
        "child": {},
        "isDict": True,
        "padding": None,
    }
def _add_conn(self, server): if not self.store: print("<%s> failed to add conn, invalid store object"\ % self.__class__.__name__) return False if self.store.__class__.__name__ == "StoreMembaseBinary": print("<%s> _add_conn: %s"\ % (self.__class__.__name__, server)) host, port = server.split(":") conn = MemcachedClient(host, int(port)) self.conns[server] = conn return True
def test_xattr_compression(self):
    """MB-32669: with active compression, a doc carrying an xattr must
    expire cleanly and leave no resident or temp items behind.

    Invocation:
    subdoc.subdoc_simple_dataset.SubdocSimpleDataset.test_xattr_compression,compression=active
    """
    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    mc.bucket_select('default')

    self.key = "test_xattr_compression"
    self.nesting_level = 5
    array = {'i_add': 0, 'i_sub': 1, 'a_i_a': [0, 1], 'ai_sub': [0, 1]}
    base_json = self.generate_json_for_nesting()
    nested_json = self.generate_nested(base_json, array, self.nesting_level)
    jsonDump = json.dumps(nested_json)

    stats = mc.stats()
    # Fix: assertEquals is a deprecated alias removed in Python 3.12;
    # use assertEqual throughout.
    self.assertEqual(stats['ep_compression_mode'], 'active')

    scheme = "http"
    host = "{0}:{1}".format(self.master.ip, self.master.port)
    self.sdk_client = SDKClient(scheme=scheme, hosts=[host], bucket="default")
    # Store the doc with a 60s TTL, then attach an xattr via sub-doc mutate.
    self.sdk_client.set(self.key, value=jsonDump, ttl=60)
    rv = self.sdk_client.cb.mutate_in(
        self.key,
        SD.upsert('my.attr', "value", xattr=True, create_parents=True),
        ttl=60)
    self.assertTrue(rv.success)

    # Wait for the mutation to persist, then evict the key from memory.
    persisted = 0
    while persisted == 0:
        opaque, rep_time, persist_time, persisted, cas = mc.observe(self.key)
    mc.evict_key(self.key)

    # Let the 60s TTL elapse, then the key must be gone.
    time.sleep(65)
    try:
        self.client.get(self.key)
        self.fail("the key should get expired")
    except mc_bin_client.MemcachedError as error:
        # status 1 — presumably KEY_ENOENT; confirm against protocol constants
        self.assertEqual(error.status, 1)

    stats = mc.stats()
    self.assertEqual(int(stats['curr_items']), 0)
    self.assertEqual(int(stats['curr_temp_items']), 0)
def _build_conns(self): """build separate connections based on store""" if not self.store: print("<%s> failed to build connections, invalid store object"\ % self.__class__.__name__) return False if self.store.__class__.__name__ == "StoreMemcachedBinary": conn = MemcachedClient(self.store.conn.host, self.store.conn.port) server_str = "{0}:{1}".format(self.store.conn.host, self.store.conn.port) self.conns[server_str] = conn elif self.store.__class__.__name__ == "StoreMembaseBinary": for memcached in self.store.awareness.memcacheds.values(): conn = MemcachedClient(memcached.host, memcached.port) server_str = "{0}:{1}".format(conn.host, conn.port) self.conns[server_str] = conn self.awareness = self.store.awareness else: print("<%s> error: unsupported store object %s" %\ (self.__class__.__name__, store.__class__.__name__)) return False return True