def test_large_doc_20MB(self):
    """Reproducer for MB-29258: load a single document larger than 20 MB with
    compression enabled and verify the server rejects it ("Data Too Big"),
    while documents at or under the limit are stored.

    Run with compression_mode as active, passive and off.
    Test param ``document_size`` is in MB (default 20).
    """
    document_size = self.input.param('document_size', 20)
    gens_load = self.generate_docs_bigdata(docs_per_day=1,
                                           document_size=document_size * 1024000)
    self.load(gens_load, buckets=self.src_bucket, verify_data=False,
              batch_size=10)

    # Read item counts straight from memcached rather than via REST.
    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    mc.bucket_select('default')
    stats = mc.stats()
    if document_size > 20:
        # Oversized doc must be rejected with "Data Too Big" -> nothing stored.
        self.assertEqual(int(stats['curr_items']), 0)
    else:
        # Doc within the limit must be stored.
        self.assertEqual(int(stats['curr_items']), 1)
        # Now try to mutate the existing doc to >20 MB; the update must fail
        # and the original item must remain (item count stays at 1).
        gens_update = self.generate_docs_bigdata(docs_per_day=1,
                                                 document_size=21 * 1024000)
        self.load(gens_update, buckets=self.src_bucket, verify_data=False,
                  batch_size=10)
        stats = mc.stats()
        self.assertEqual(int(stats['curr_items']), 1)
def do_get_random_key(self):
    # MB-31548, get_Random key gets hung sometimes.
    # Seed the bucket with a batch of docs, then hammer get_random_key to
    # check it neither hangs nor errors out.
    self.log.info("Creating few docs in the bucket")
    rest = RestConnection(self.master)
    client = VBucketAwareMemcached(rest, 'default')
    key = "test_docs-"
    payload = json.dumps({'value': 'value1'})
    for doc_idx in range(1000):
        doc_key = key + str(doc_idx)
        client.memcached(doc_key).set(doc_key, 0, 0, payload)

    self.log.info("Performing random_gets")
    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    mc.bucket_select('default')
    for count in range(1, 1000001):
        try:
            mc.get_random_key()
        except MemcachedError as error:
            self.fail("<MemcachedError #%d ``%s''>"
                      % (error.status, error.message))
        if count % 1000 == 0:
            self.log.info('The number of iteration is {}'.format(count))
def verify_stat(self, items, value="active"):
    """Verify compression stats on the default bucket.

    :param items: expected value of ``ep_item_compressor_num_compressed``.
    :param value: expected ``ep_compression_mode`` ("active"/"passive"/"off").

    Also asserts that active-vbucket item memory differs from its
    uncompressed counterpart, i.e. compression actually took effect.
    """
    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    mc.bucket_select('default')
    stats = mc.stats()
    # assertEquals/assertNotEquals are deprecated aliases; use the
    # canonical unittest names.
    self.assertEqual(stats['ep_compression_mode'], value)
    self.assertEqual(int(stats['ep_item_compressor_num_compressed']), items)
    self.assertNotEqual(int(stats['vb_active_itm_memory']),
                        int(stats['vb_active_itm_memory_uncompressed']))
def do_get_random_key(self):
    # MB-31548, get_Random key gets hung sometimes.
    # Issue a large number of random-key fetches; any MemcachedError (or a
    # hang) fails the test.
    conn = MemcachedClient(self.master.ip, 11210)
    conn.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    conn.bucket_select('default')

    iteration = 0
    while iteration < 1000000:
        iteration += 1
        try:
            conn.get_random_key()
        except MemcachedError as error:
            self.fail("<MemcachedError #%d ``%s''>"
                      % (error.status, error.message))
        if iteration % 1000 == 0:
            self.log.info('The number of iteration is {}'.format(iteration))
def test_bucket_select_audit(self):
    # security.audittest.auditTest.test_bucket_select_audit,default_bucket=false,id=20492
    # Selecting a bucket over the memcached port must produce an audit event.
    rest = RestConnection(self.master)
    rest.create_bucket(bucket='TestBucket', ramQuotaMB=100)
    time.sleep(30)  # allow the new bucket to warm up before selecting it

    conn = MemcachedClient(self.master.ip, 11210)
    conn.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    conn.bucket_select('TestBucket')

    expectedResults = {
        "bucket": "TestBucket",
        "description": "The specified bucket was selected",
        "id": self.eventID,
        "name": "select bucket",
        "peername": "127.0.0.1:46539",
        "real_userid": {"domain": "memcached", "user": "******"},
        "sockname": "127.0.0.1:11209",
    }
    auditor = audit(eventID=self.eventID, host=self.master)
    actualEvent = auditor.returnEvent(self.eventID)
    auditor.validateData(actualEvent, expectedResults)
def test_large_doc_size_2MB(self):
    """Load 125 large documents and verify they all land in the bucket.

    bucket size =256MB, when Bucket gets filled 236MB then the test starts failing
    document size =2MB, No of docs = 108 , load 150 docs
    epengine.basic_ops.basic_ops.test_large_doc_size_2MB,skip_cleanup = True,document_size=2048000,dgm_run=True
    """
    docs_per_day = 5
    document_size = self.input.param('document_size')
    # generate docs with size >= 1MB , See MB-29333
    gens_load = self.generate_docs_bigdata(docs_per_day=25 * docs_per_day,
                                           document_size=document_size)
    self.load(gens_load, buckets=self.src_bucket, verify_data=False,
              batch_size=10)

    # check if all the documents(125) are loaded else the test has failed
    # with "Memcached Error 134"
    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    mc.bucket_select('default')
    stats = mc.stats()
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(int(stats['curr_items']), 125)
def test_memecached_basic_api(self):
    # epengine.basic_collections.basic_collections.test_memecached_basic_api
    # Create a scope/collection pair with max-length (30 char) names and
    # exercise set/get on the collection through a raw memcached client.
    scope_name = "ScopeWith30CharactersinName123"
    Collection_name = "CollectionsWithLargeNamechecki"
    self.create_scope(scope=scope_name)
    self.create_collection(scope=scope_name, collection=Collection_name)
    collection = scope_name + "." + Collection_name
    self.log.info("colelction name is {}".format(collection))
    self.sleep(10)

    # create memcached client
    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain(self.master.rest_username, self.master.rest_password)

    # enable collection and get collections
    mc.enable_collections()
    mc.bucket_select('default')
    mc.hello("set_collection")
    mc.get_collections(True)
    self.log.info("get collections completed")

    try:
        mc.set("key", 0, 0, "value", collection=collection)
        flag, keyx, value = mc.get(key="key", collection=collection)
        print("flag:{} keyx:{} value:{}".format(flag, keyx, value))
    except MemcachedError as exp:
        self.fail("Exception with setting and getting the key in collections {0}".format(exp))
def do_setWithMeta_twice(self):
    # Issue the same setWithMeta twice: the first must succeed, the second
    # must fail (key exists) without leaving a temp item behind.
    conn = MemcachedClient(self.master.ip, 11210)
    conn.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    conn.bucket_select('default')

    try:
        conn.setWithMeta('1', '{"Hello":"World"}', 3600, 0, 1,
                         0x1512a3186faa0000)
    except MemcachedError as error:
        self.log.info("<MemcachedError #%d ``%s''>"
                      % (error.status, error.message))
        self.fail("Error on First setWithMeta()")

    stats = conn.stats()
    self.log.info('curr_items: {} and curr_temp_items:{}'.format(
        stats['curr_items'], stats['curr_temp_items']))
    self.log.info("Sleeping for 5 and checking stats again")
    time.sleep(5)
    stats = conn.stats()
    self.log.info('curr_items: {} and curr_temp_items:{}'.format(
        stats['curr_items'], stats['curr_temp_items']))

    try:
        conn.setWithMeta('1', '{"Hello":"World"}', 3600, 0, 1,
                         0x1512a3186faa0000)
    except MemcachedError as error:
        stats = conn.stats()
        self.log.info(
            'After 2nd setWithMeta(), curr_items: {} and curr_temp_items:{}'.format(
                stats['curr_items'], stats['curr_temp_items']))
        if int(stats['curr_temp_items']) == 1:
            self.fail("Error on second setWithMeta(), expected curr_temp_items to be 0")
        else:
            self.log.info("<MemcachedError #%d ``%s''>"
                          % (error.status, error.message))
def do_setWithMeta_twice(self):
    # Verify that repeating a setWithMeta with identical meta fails on the
    # second call and that no temp item lingers in the bucket afterwards.
    client = MemcachedClient(self.master.ip, 11210)
    client.sasl_auth_plain(self.master.rest_username,
                           self.master.rest_password)
    client.bucket_select('default')

    try:
        client.setWithMeta('1', '{"Hello":"World"}', 3600, 0, 1,
                           0x1512a3186faa0000)
    except MemcachedError as error:
        self.log.info("<MemcachedError #%d ``%s''>"
                      % (error.status, error.message))
        self.fail("Error on First setWithMeta()")

    def log_item_stats(snapshot):
        # Helper: report current vs. temporary item counts.
        self.log.info('curr_items: {} and curr_temp_items:{}'.format(
            snapshot['curr_items'], snapshot['curr_temp_items']))

    log_item_stats(client.stats())
    self.log.info("Sleeping for 5 and checking stats again")
    time.sleep(5)
    log_item_stats(client.stats())

    try:
        client.setWithMeta('1', '{"Hello":"World"}', 3600, 0, 1,
                           0x1512a3186faa0000)
    except MemcachedError as error:
        snapshot = client.stats()
        self.log.info(
            'After 2nd setWithMeta(), curr_items: {} and curr_temp_items:{}'
            .format(snapshot['curr_items'], snapshot['curr_temp_items']))
        if int(snapshot['curr_temp_items']) == 1:
            self.fail(
                "Error on second setWithMeta(), expected curr_temp_items to be 0"
            )
        else:
            self.log.info("<MemcachedError #%d ``%s''>"
                          % (error.status, error.message))
def test_epengine_save_meta_to_48bits_in_ram(self):
    """ As in MB-29119, ep-engine must save rev-seqno in 48 bits
        This fix went into version 5.1+
        params: eviction_policy=fullEviction,sasl_buckets=1 """
    if float(self.cb_version[:3]) < 5.1:
        self.log.info("This test only work for version 5.1+")
        return
    if len(self.buckets) >= 2:
        self.fail("This test only runs in one bucket")

    rest = RestConnection(self.master)
    duplicate_set_accepted = False
    conn = MemcachedClient(self.master.ip, 11210)
    conn.sasl_auth_plain('Administrator', 'password')
    conn.bucket_select('standard_bucket0')
    conn.vbucketId = 903

    self.log.info("Start to test")
    conn.setWithMeta('test_with_meta', 'value', 0, 0, 0x1234000000000001, 1)
    self.sleep(10)
    if int(rest.get_active_key_count("standard_bucket0")) != 1:
        self.fail("Fail to set 1 key to standard_bucket0")

    conn.evict_key('test_with_meta')
    try:
        # Re-setting the same key with identical meta must be rejected.
        conn.setWithMeta('test_with_meta', 'value', 0, 0,
                         0x1234000000000001, 1)
        duplicate_set_accepted = True
    except MemcachedError as e:
        print("\nMemcached exception: ", e)
        # error #2 is ErrorKeyEexists
        if "#2" not in str(e):
            self.fail("ep engine failed to check existed key")
    if duplicate_set_accepted:
        self.fail("ep engine could set an existed key")
def test_epengine_save_meta_to_48bits_in_ram(self):
    """ As in MB-29119, ep-engine must save rev-seqno in 48 bits
        This fix went into version 5.1+
        params: eviction_policy=fullEviction,sasl_buckets=1 """
    if 5.1 > float(self.cb_version[:3]):
        self.log.info("This test only work for version 5.1+")
        return
    if len(self.buckets) >= 2:
        self.fail("This test only runs in one bucket")
    rest = RestConnection(self.master)
    success_set_exist_item = False
    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain('Administrator', 'password')
    mc.bucket_select('bucket0')
    mc.vbucketId = 903
    self.log.info("Start to test")
    mc.setWithMeta('test_with_meta', 'value', 0, 0, 0x1234000000000001, 1)
    self.sleep(10)
    item_count = rest.get_active_key_count("bucket0")
    if int(item_count) != 1:
        self.fail("Fail to set 1 key to bucket0")
    # Evict the key so the duplicate setWithMeta exercises the on-disk
    # (full-eviction) metadata path, not just the in-memory one.
    mc.evict_key('test_with_meta')
    try:
        mc.setWithMeta('test_with_meta', 'value', 0, 0,
                       0x1234000000000001, 1)
        success_set_exist_item = True
    except MemcachedError as e:
        # Fixed: original used the Python 2 print statement, which is a
        # SyntaxError under Python 3 (the rest of the file uses print()).
        print("\nMemcached exception: ", e)
        if "#2" not in str(e):
            self.fail("ep engine failed to check existed key")
    """error #2 is ErrorKeyEexists"""
    if success_set_exist_item:
        self.fail("ep engine could set an existed key")
def test_valid_bucket_name(self, password='******'):
    # Create a bucket of the configured type under a valid name, verify it
    # comes up, load data into it, then delete it and verify the deletion.
    pending = []
    shared_params = self._create_bucket_params(server=self.server,
                                               size=self.bucket_size,
                                               replicas=self.num_replicas)

    if self.bucket_type == 'sasl':
        self.cluster.create_sasl_bucket(name=self.bucket_name,
                                        password=password,
                                        bucket_params=shared_params)
        bucket = Bucket(name=self.bucket_name,
                        num_replicas=self.num_replicas,
                        bucket_size=self.bucket_size,
                        master_id=self.server)
        self.buckets.append(bucket)
    elif self.bucket_type == 'standard':
        self.cluster.create_standard_bucket(name=self.bucket_name,
                                            port=STANDARD_BUCKET_PORT + 1,
                                            bucket_params=shared_params)
        bucket = Bucket(name=self.bucket_name,
                        num_replicas=self.num_replicas,
                        bucket_size=self.bucket_size,
                        port=STANDARD_BUCKET_PORT + 1,
                        master_id=self.server)
        self.buckets.append(bucket)
    elif self.bucket_type == "memcached":
        pending.append(self.cluster.async_create_memcached_bucket(
            name=self.bucket_name,
            port=STANDARD_BUCKET_PORT + 1,
            bucket_params=shared_params))
        bucket = Bucket(name=self.bucket_name,
                        num_replicas=self.num_replicas,
                        bucket_size=self.bucket_size,
                        port=STANDARD_BUCKET_PORT + 1,
                        master_id=self.server,
                        type='memcached')
        self.buckets.append(bucket)
        for task in pending:
            task.result()
    else:
        self.log.error('Bucket type not specified')
        return

    created = BucketOperationHelper.wait_for_bucket_creation(
        self.bucket_name, self.rest)
    self.assertTrue(created,
                    msg='failed to start up bucket with name "{0}'.format(
                        self.bucket_name))

    if self.bucket_type == "memcached":
        # Memcached buckets are loaded directly over the binary protocol.
        conn = MemcachedClient(self.master.ip, 11210)
        conn.sasl_auth_plain(self.master.rest_username,
                             self.master.rest_password)
        conn.bucket_select(self.bucket_name)
        for idx in range(self.num_items):
            doc_key = "key" + str(idx)
            try:
                conn.set(doc_key, 0, 0, "value1")
            except MemcachedError:
                self.fail("Error on creating a doc")
    else:
        gen_load = BlobGenerator('buckettest', 'buckettest-',
                                 self.value_size, start=0,
                                 end=self.num_items)
        self._load_all_buckets(self.server, gen_load, "create", 0)

    self.cluster.bucket_delete(self.server, self.bucket_name)
    deleted = BucketOperationHelper.wait_for_bucket_deletion(
        self.bucket_name, self.rest, timeout_in_seconds=60)
    self.assertTrue(
        deleted,
        msg='bucket "{0}" was not deleted even after waiting for 30 seconds'
        .format(self.bucket_name))