def test_memecached_basic_api(self):
    """Exercise the basic memcached set/get API against a freshly created
    scope and collection whose names are 30 characters long.

    Fails the test if set/get on the new collection raises MemcachedError.
    """
    # epengine.basic_collections.basic_collections.test_memecached_basic_api
    scope_name = "ScopeWith30CharactersinName123"
    # snake_case (was `Collection_name`, which read like a class name)
    collection_name = "CollectionsWithLargeNamechecki"
    self.rest.create_scope(scope=scope_name)
    self.rest.create_collection(scope=scope_name,
                                collection=collection_name,
                                bucket=self.default_bucket_name)
    collection = scope_name + "." + collection_name
    self.log.info("collection name is {}".format(collection))
    self.sleep(10)
    # create memcached client
    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    # enable collection support, then fetch the collection manifest
    mc.enable_collections()
    mc.bucket_select('default')
    mc.hello("set_collection")
    mc.get_collections(True)
    self.log.info("get collections completed")
    try:
        mc.set("key", 0, 0, "value", collection=collection)
        flag, keyx, value = mc.get(key="key", collection=collection)
        # use the test logger instead of a bare print() so the output
        # lands in the test log like every other message in this test
        self.log.info("flag:{} keyx:{} value:{}".format(flag, keyx, value))
    except MemcachedError as exp:
        self.fail("Exception with setting and getting the key in collections {0}".format(exp))
def verify_revid(self):
    """Check that every key in self.keys_loaded carries identical metadata
    (deleted, flags, exp, rev_id, cas) on the source and destination
    vb0 nodes; fail on any mismatch or missing key."""
    missing_keys = False
    src_node = self.get_active_vb0_node(self.src_master)
    dest_node = self.get_active_vb0_node(self.dest_master)
    src_client = MemcachedClient(src_node.ip, 11210)
    dest_client = MemcachedClient(dest_node.ip, 11210)
    # authenticate both clients against the default bucket
    for conn in (src_client, dest_client):
        conn.sasl_auth_plain("cbadminbucket", "password")
        conn.bucket_select("default")
    for key in self.keys_loaded:
        try:
            src_meta = src_client.getMeta(key)
            dest_meta = dest_client.getMeta(key)
        except MemcachedError as e:
            # key could not be fetched on one side; record and keep going
            self.log.error("Key {0} threw {1} on getMeta()".format(key, e))
            missing_keys = True
            continue
        self.log.info("deleted, flags, exp, rev_id, cas for key from Source({0}) {1} = {2}"
                      .format(src_node.ip, key, src_meta))
        self.log.info("deleted, flags, exp, rev_id, cas for key from Destination({0}) {1} = {2}"
                      .format(dest_node.ip, key, dest_meta))
        if src_meta == dest_meta:
            self.log.info("RevID verification successful for key {0}".format(key))
        else:
            self.fail("RevID verification failed for key {0}".format(key))
    if missing_keys:
        self.fail("Some keys are missing at destination")
def verify_revid(self):
    """Verify that source and destination vb0 nodes agree on the metadata
    (deleted, flags, exp, rev_id, cas) of every loaded key."""
    src_node = self.get_active_vb0_node(self.src_master)
    dest_node = self.get_active_vb0_node(self.dest_master)

    def _authed_client(node):
        # fresh memcached client authenticated on the default bucket
        conn = MemcachedClient(node.ip, 11210)
        conn.sasl_auth_plain("cbadminbucket", "password")
        conn.bucket_select("default")
        return conn

    src_client = _authed_client(src_node)
    dest_client = _authed_client(dest_node)
    failed_lookups = []
    for key in self.keys_loaded:
        try:
            src_meta = src_client.getMeta(key)
            dest_meta = dest_client.getMeta(key)
            self.log.info("deleted, flags, exp, rev_id, cas for key from Source({0}) {1} = {2}"
                          .format(src_node.ip, key, src_meta))
            self.log.info("deleted, flags, exp, rev_id, cas for key from Destination({0}) {1} = {2}"
                          .format(dest_node.ip, key, dest_meta))
            if src_meta != dest_meta:
                self.fail("RevID verification failed for key {0}".format(key))
            self.log.info("RevID verification successful for key {0}".format(key))
        except MemcachedError as e:
            self.log.error("Key {0} threw {1} on getMeta()".format(key, e))
            failed_lookups.append(key)
    if failed_lookups:
        self.fail("Some keys are missing at destination")
def connection(self, client_ip, bucket_name, user, password, port=11210):
    """Open an authenticated memcached connection to `bucket_name`.

    :param client_ip: host to connect to
    :param bucket_name: bucket to select after SASL auth
    :param user: SASL username
    :param password: SASL password (never logged)
    :param port: memcached port, default 11210
    :return: (client, True) on success, (False, False) on any failure
             (best-effort contract preserved for existing callers)
    """
    # SECURITY FIX: the original logged the plaintext password; credentials
    # must never be written to the test log
    log.info("Bucket name for connection is ---- {0}, username -- {1}"
             .format(bucket_name, user))
    try:
        mc = MemcachedClient(host=client_ip, port=port)
        mc.sasl_auth_plain(user, password)
        mc.bucket_select(bucket_name)
        return mc, True
    except Exception as e:
        log.info("Exception is from connection function {0}".format(e))
        return False, False
def connection(self, client_ip, bucket_name, user, password, port=11210):
    """Connect and SASL-authenticate a memcached client to a bucket.

    :param client_ip: host to connect to
    :param bucket_name: bucket to select after SASL auth
    :param user: SASL username
    :param password: SASL password (never logged)
    :param port: memcached port, default 11210
    :return: (client, True) on success, (False, False) on any failure
             (best-effort contract preserved for existing callers)
    """
    # SECURITY FIX: do not log the plaintext password (the original
    # included it in the log line)
    log.info("Bucket name for connection is ---- {0}, username -- {1}"
             .format(bucket_name, user))
    try:
        mc = MemcachedClient(host=client_ip, port=port)
        mc.sasl_auth_plain(user, password)
        mc.bucket_select(bucket_name)
        return mc, True
    except Exception as e:
        log.info("Exception is from connection function {0}".format(e))
        return False, False
def verify_stat(self, items, value="active"):
    """Validate compression stats on the default bucket.

    :param items: expected value of the ep_item_compressor_num_compressed stat
    :param value: expected ep_compression_mode, "active" by default
    :raises AssertionError: if any stat does not match expectations
    """
    mc = MemcachedClient(self.cluster.master.ip, constants.memcached_port)
    mc.sasl_auth_plain(self.cluster.master.rest_username,
                       self.cluster.master.rest_password)
    mc.bucket_select('default')
    stats = mc.stats()
    # assertEquals/assertNotEquals are deprecated unittest aliases
    # (removed in Python 3.12) -- use assertEqual/assertNotEqual
    self.assertEqual(stats['ep_compression_mode'], value)
    self.assertEqual(int(stats['ep_item_compressor_num_compressed']), items)
    # compressed and uncompressed active-item memory must differ,
    # otherwise nothing was actually compressed
    self.assertNotEqual(int(stats['vb_active_itm_memory']),
                        int(stats['vb_active_itm_memory_uncompressed']))
def load_one_mutation_into_source_vb0(self, vb0_active_src_node):
    """Write the next key from self.vb0_keys into vbucket 0 on the given
    source node, recording it in self.keys_loaded on success."""
    target_ip = vb0_active_src_node.ip
    key = self.vb0_keys[self.key_counter]
    client = MemcachedClient(target_ip, 11210)
    client.sasl_auth_plain("cbadminbucket", "password")
    client.bucket_select("default")
    try:
        client.set(key, exp=0, flags=0, val="dummy val")
        self.key_counter += 1
        self.keys_loaded.append(key)
        self.log.info("Loaded key {0} onto vb0 in {1}".format(key, target_ip))
        meta = client.getMeta(key)
        self.log.info("deleted, flags, exp, rev_id, cas for key {0} = {1}".format(key, meta))
    except MemcachedError as e:
        # best-effort: a failed mutation is only logged, not raised
        self.log.error(e)
def load_one_mutation_into_source_vb0(self, vb0_active_src_node):
    """Set one dummy document (the next key in self.vb0_keys) on vb0 of
    the given source node and append it to self.keys_loaded."""
    doc_key = self.vb0_keys[self.key_counter]
    conn = MemcachedClient(vb0_active_src_node.ip, 11210)
    conn.sasl_auth_plain("cbadminbucket", "password")
    conn.bucket_select("default")
    try:
        conn.set(doc_key, exp=0, flags=0, val="dummy val")
    except MemcachedError as err:
        # failed set: log and bail out without advancing the counter
        self.log.error(err)
        return
    self.key_counter += 1
    self.keys_loaded.append(doc_key)
    self.log.info("Loaded key {0} onto vb0 in {1}".format(doc_key, vb0_active_src_node.ip))
    try:
        self.log.info("deleted, flags, exp, rev_id, cas for key {0} = {1}".format(doc_key, conn.getMeta(doc_key)))
    except MemcachedError as err:
        self.log.error(err)
def wait_for_warmup(self, host, port):
    """Block until the default bucket on host:port leaves degraded
    (warmup) mode. Ephemeral buckets never warm up, so return at once.

    Polls once per second; loops indefinitely until warmed up (original
    contract -- no timeout added).
    """
    if self.bucket_type == 'ephemeral':
        return
    while True:
        client = McdClient(host, port)
        try:
            client.bucket_select("default")
            response = client.stats()
            # check the old style or new style (as of 4.5) results
            mode = response.get('ep_degraded_mode')
            # '0'/'false' both mean warmup finished
            if mode is not None and mode in ('0', 'false'):
                return
        except Exception:
            # best-effort: server may not be up yet; retry after sleeping
            pass
        finally:
            # FIX: the original leaked one client socket per iteration;
            # close every client we open (assumes McdClient exposes
            # close() like the MemcachedClient used elsewhere -- confirm)
            client.close()
        self.sleep(1)
def do_get_random_key(self):
    """Hammer get_random_key 1,000,000 times to reproduce MB-31548
    (get_random_key occasionally hangs); fail fast on any error."""
    # MB-31548, get_Random key gets hung sometimes.
    mc = MemcachedClient(self.cluster.master.ip, constants.memcached_port)
    mc.sasl_auth_plain(self.cluster.master.rest_username,
                       self.cluster.master.rest_password)
    mc.bucket_select('default')
    for count in range(1, 1000001):
        try:
            mc.get_random_key()
        except MemcachedError as error:
            self.fail("<MemcachedError #%d ``%s''>"
                      % (error.status, error.message))
        # progress marker every thousand iterations
        if count % 1000 == 0:
            self.log.info('The number of iteration is {}'.format(count))
def do_setWithMeta_twice(self):
    """Issue the same setWithMeta twice and watch curr_items /
    curr_temp_items; the second call must not leave a temp item behind."""
    mc = MemcachedClient(self.cluster.master.ip, 11210)
    mc.sasl_auth_plain(self.cluster.master.rest_username,
                       self.cluster.master.rest_password)
    mc.bucket_select('default')

    def _log_item_stats():
        # snapshot and log the two item counters we care about
        snapshot = mc.stats()
        self.log.info('curr_items: {0} and curr_temp_items:{1}'.format(
            snapshot['curr_items'], snapshot['curr_temp_items']))
        return snapshot

    try:
        mc.setWithMeta('1', '{"Hello":"World"}', 3600, 0, 1,
                       0x1512a3186faa0000)
    except MemcachedError as error:
        self.log.info("<MemcachedError #%d ``%s''>"
                      % (error.status, error.message))
        self.fail("Error on First setWithMeta()")
    _log_item_stats()
    self.log.info("Sleeping for 5 and checking stats again")
    time.sleep(5)
    _log_item_stats()
    try:
        mc.setWithMeta('1', '{"Hello":"World"}', 3600, 0, 1,
                       0x1512a3186faa0000)
    except MemcachedError as error:
        stats = mc.stats()
        self.log.info(
            'After 2nd setWithMeta(), curr_items: {} and curr_temp_items:{}'
            .format(stats['curr_items'], stats['curr_temp_items']))
        if int(stats['curr_temp_items']) == 1:
            self.fail(
                "Error on second setWithMeta(), expected curr_temp_items to be 0"
            )
        else:
            self.log.info("<MemcachedError #%d ``%s''>"
                          % (error.status, error.message))
def wait_for_vbuckets_ready_state(node, bucket, timeout_in_seconds=300,
                                  log_msg='', admin_user='******',
                                  admin_pass='******'):
    """Poll every memcached server hosting vbuckets of `bucket` until all
    vbuckets report a ready state or the timeout expires.

    :param node: cluster node used for REST/bucket discovery
    :param bucket: bucket name (str)
    :param timeout_in_seconds: overall polling budget
    :param log_msg: caller-supplied prefix for error log lines
    :param admin_user/admin_pass: credentials for 5.0+ SASL auth
    :return: True if every vbucket became ready within the timeout
    """
    start_time = time.time()
    end_time = start_time + timeout_in_seconds
    # vbucket id -> True once observed in a ready state
    ready_vbuckets = {}
    log = logger.get("infra")
    rest = RestConnection(node)
    bucket_conn = BucketHelper(node)
    bucket_conn.vbucket_map_ready(bucket, 60)
    vbucket_count = len(bucket_conn.get_vbuckets(bucket))
    vbuckets = bucket_conn.get_vbuckets(bucket)
    obj = VBucketAwareMemcached(rest, bucket, info=node)
    memcacheds, vbucket_map, vbucket_map_replica = obj.request_map(
        rest, bucket)
    # Create dictionary with key:"ip:port" and value: a list of vbuckets
    server_dict = defaultdict(list)
    for everyID in range(0, vbucket_count):
        server_dict[str(vbucket_map[everyID])].append(everyID)
    while time.time() < end_time and len(ready_vbuckets) < vbucket_count:
        for every_ip_port in server_dict:
            # Retrieve memcached ip and port
            ip, port = every_ip_port.split(":")
            client = MemcachedClient(ip, int(port), timeout=30)
            client.vbucket_count = len(vbuckets)
            bucket_info = bucket_conn.get_bucket(bucket)
            versions = rest.get_nodes_versions(logging=False)
            # pre-5.0 ("pre-spock") nodes need bucket-name SASL auth
            pre_spock = any("5" > version for version in versions)
            if pre_spock:
                log.info("Atleast 1 of the server is on pre-spock "
                         "version. Using the old ssl auth to connect to "
                         "bucket.")
                client.sasl_auth_plain(
                    bucket_info.name.encode('ascii'),
                    bucket_info.saslPassword.encode('ascii'))
            else:
                client.sasl_auth_plain(admin_user, admin_pass)
            # FIX: encode only once -- the original re-encoded on every
            # server iteration, which raises AttributeError on bytes
            # under Python 3
            if isinstance(bucket, str):
                bucket = bucket.encode('ascii')
            client.bucket_select(bucket)
            for i in server_dict[every_ip_port]:
                try:
                    (a, b, c) = client.get_vbucket_state(i)
                except mc_bin_client.MemcachedError as e:
                    ex_msg = str(e)
                    # FIX: truncate the *error* message, which embeds the
                    # full vBucketMap dump on "Not my vbucket" responses.
                    # The original tested/truncated log_msg instead, which
                    # never contains that text.
                    if "Not my vbucket" in ex_msg:
                        ex_msg = ex_msg[:ex_msg.find("vBucketMap") + 12] + "..."
                    if e.status == memcacheConstants.ERR_NOT_MY_VBUCKET:
                        # expected while vbuckets are still moving; retry
                        continue
                    log.error("%s: %s" % (log_msg, ex_msg))
                    continue
                except exceptions.EOFError:
                    # The client was disconnected for some reason. This can
                    # happen just after the bucket REST API returns (before
                    # the buckets are created in each memcached process).
                    # See: http://review.couchbase.org/#/c/49781/
                    log.error(
                        "got disconnected from the server, reconnecting")
                    continue
                # presence of "\x01"/"\x02" markers in the state blob is
                # treated as "ready" (exact marker semantics defined by
                # get_vbucket_state, not visible here)
                if c.find("\x01") > 0 or c.find("\x02") > 0:
                    ready_vbuckets[i] = True
                elif i in ready_vbuckets:
                    log.warning(
                        "vbucket state changed from active to {0}".format(
                            c))
                    del ready_vbuckets[i]
            client.close()
    return len(ready_vbuckets) == vbucket_count
def wait_for_vbuckets_ready_state(node, bucket, timeout_in_seconds=300, log_msg='', admin_user='******', admin_pass='******'):
    """Poll every memcached server hosting vbuckets of `bucket` until all
    vbuckets report a ready state, or until `timeout_in_seconds` elapses.

    :param node: cluster node used for REST/bucket discovery
    :param bucket: bucket name (str; re-bound to bytes below)
    :param log_msg: caller-supplied prefix for error log lines
    :param admin_user/admin_pass: credentials used for 5.0+ SASL auth
    :return: True if every vbucket became ready within the timeout
    """
    log = logger.Logger.get_logger()
    start_time = time.time()
    end_time = start_time + timeout_in_seconds
    # vbucket id -> True once observed in a ready state
    ready_vbuckets = {}
    rest = RestConnection(node)
    servers = rest.get_nodes()
    RestHelper(rest).vbucket_map_ready(bucket, 60)
    vbucket_count = len(rest.get_vbuckets(bucket))
    vbuckets = rest.get_vbuckets(bucket)
    obj = VBucketAwareMemcached(rest, bucket)
    memcacheds, vbucket_map, vbucket_map_replica = obj.request_map(rest, bucket)
    # Create dictionary with key:"ip:port" and value: a list of vbuckets
    server_dict = defaultdict(list)
    for everyID in range(0, vbucket_count):
        memcached_ip_port = str(vbucket_map[everyID])
        server_dict[memcached_ip_port].append(everyID)
    while time.time() < end_time and len(ready_vbuckets) < vbucket_count:
        for every_ip_port in server_dict:
            # Retrieve memcached ip and port (rsplit so IPv6-ish hosts with
            # colons keep everything before the last ":" as the ip)
            ip = every_ip_port.rsplit(":", 1)[0]
            port = every_ip_port.rsplit(":", 1)[1]
            client = MemcachedClient(ip, int(port), timeout=30)
            client.vbucket_count = len(vbuckets)
            bucket_info = rest.get_bucket(bucket)
            # Pre-5.0 ("pre-spock") clusters require bucket-name SASL auth;
            # a None compatibility answer is conservatively treated as pre-spock.
            cluster_compatibility = rest.check_cluster_compatibility("5.0")
            if cluster_compatibility is None:
                pre_spock = True
            else:
                pre_spock = not cluster_compatibility
            if pre_spock:
                log.info("Atleast 1 of the server is on pre-spock "
                         "version. Using the old ssl auth to connect to "
                         "bucket.")
                client.sasl_auth_plain(
                    bucket_info.name.encode('ascii'),
                    bucket_info.saslPassword.encode('ascii'))
            else:
                client.sasl_auth_plain(admin_user, admin_pass)
            bucket = bucket.encode('ascii')
            client.bucket_select(bucket)
            for i in server_dict[every_ip_port]:
                try:
                    (a, b, c) = client.get_vbucket_state(i)
                except mc_bin_client.MemcachedError as e:
                    ex_msg = str(e)
                    # NOTE(review): this tests/truncates log_msg, but the
                    # "Not my vbucket" text (with the huge vBucketMap dump)
                    # appears in ex_msg, the error string -- this probably
                    # should operate on ex_msg instead; confirm against the
                    # other copy of this helper before changing.
                    if "Not my vbucket" in log_msg:
                        log_msg = log_msg[:log_msg.find("vBucketMap") + 12] + "..."
                    if e.status == memcacheConstants.ERR_NOT_MY_VBUCKET:
                        # May receive this while waiting for vbuckets;
                        # continue and retry
                        continue
                    log.error("%s: %s" % (log_msg, ex_msg))
                    continue
                except exceptions.EOFError:
                    # The client was disconnected for some reason. This can
                    # happen just after the bucket REST API is returned (before
                    # the buckets are created in each of the memcached processes.)
                    # See here for some details: http://review.couchbase.org/#/c/49781/
                    # Longer term when we don't disconnect clients in this state we
                    # should probably remove this code.
                    log.error("got disconnected from the server, reconnecting")
                    client.reconnect()
                    client.sasl_auth_plain(bucket_info.name.encode('ascii'),
                                           bucket_info.saslPassword.encode('ascii'))
                    continue
                # presence of "\x01"/"\x02" markers in the state blob is
                # treated as "ready" (exact marker semantics defined by
                # get_vbucket_state, not visible here)
                if c.find("\x01") > 0 or c.find("\x02") > 0:
                    ready_vbuckets[i] = True
                elif i in ready_vbuckets:
                    log.warning("vbucket state changed from active to {0}".format(c))
                    del ready_vbuckets[i]
            client.close()
    return len(ready_vbuckets) == vbucket_count