def do_setWithMeta_twice(self):
    """Issue setWithMeta() twice for the same key and verify the second
    call does not leave a temporary item behind (curr_temp_items must
    not be 1 after the second, failing call).
    """
    client = MemcachedClient(self.master.ip, 11210)
    client.sasl_auth_plain(self.master.rest_username,
                           self.master.rest_password)
    client.bucket_select('default')

    # First setWithMeta() is expected to succeed; any error fails the test.
    try:
        client.setWithMeta('1', '{"Hello":"World"}', 3600, 0, 1,
                           0x1512a3186faa0000)
    except MemcachedError as error:
        self.log.info("<MemcachedError #%d ``%s''>"
                      % (error.status, error.message))
        self.fail("Error on First setWithMeta()")

    item_stats = client.stats()
    self.log.info('curr_items: {} and curr_temp_items:{}'.format(
        item_stats['curr_items'], item_stats['curr_temp_items']))

    # Give the server time to flush/expire temp items, then re-check.
    self.log.info("Sleeping for 5 and checking stats again")
    time.sleep(5)
    item_stats = client.stats()
    self.log.info('curr_items: {} and curr_temp_items:{}'.format(
        item_stats['curr_items'], item_stats['curr_temp_items']))

    # Second setWithMeta() on the same key: an error is acceptable, but it
    # must not leave curr_temp_items at 1.
    try:
        client.setWithMeta('1', '{"Hello":"World"}', 3600, 0, 1,
                           0x1512a3186faa0000)
    except MemcachedError as error:
        item_stats = client.stats()
        self.log.info(
            'After 2nd setWithMeta(), curr_items: {} and curr_temp_items:{}'
            .format(item_stats['curr_items'], item_stats['curr_temp_items']))
        if int(item_stats['curr_temp_items']) == 1:
            self.fail(
                "Error on second setWithMeta(), expected curr_temp_items to be 0")
        else:
            self.log.info("<MemcachedError #%d ``%s''>"
                          % (error.status, error.message))
def test_large_doc_20MB(self):
    """Reproducer for MB-29258.

    Load one document of ``document_size`` MB (default 20) with
    compression enabled (compression_mode may be active, passive or off).
    A document larger than 20 MB must be rejected by the server
    ("Data Too Big"), so curr_items stays 0; otherwise the store
    succeeds and curr_items is 1.  A follow-up 21 MB update of the same
    key must not evict the already-stored document.
    """
    document_size = self.input.param('document_size', 20)
    gens_load = self.generate_docs_bigdata(
        docs_per_day=1, document_size=(document_size * 1024000))
    self.load(gens_load, buckets=self.src_bucket, verify_data=False,
              batch_size=10)

    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    mc.bucket_select('default')

    stats = mc.stats()
    if document_size > 20:
        # Store failed with "Data Too Big": nothing was persisted.
        self.assertEqual(int(stats['curr_items']), 0)
    else:
        self.assertEqual(int(stats['curr_items']), 1)

    # Updating the same key with a 21 MB value must fail without
    # removing the document stored above.
    gens_update = self.generate_docs_bigdata(docs_per_day=1,
                                             document_size=(21 * 1024000))
    self.load(gens_update, buckets=self.src_bucket, verify_data=False,
              batch_size=10)
    stats = mc.stats()
    self.assertEqual(int(stats['curr_items']), 1)
def verify_stat(self, items, value="active"):
    """Verify compression-related memcached stats on the default bucket.

    :param items: expected value of ``ep_item_compressor_num_compressed``.
    :param value: expected ``ep_compression_mode`` ("active" by default).

    Also asserts that compressed and uncompressed active item memory
    differ, i.e. compression actually took effect.
    """
    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    mc.bucket_select('default')
    stats = mc.stats()
    self.assertEqual(stats['ep_compression_mode'], value)
    self.assertEqual(int(stats['ep_item_compressor_num_compressed']), items)
    # If compression worked, in-memory size differs from uncompressed size.
    self.assertNotEqual(int(stats['vb_active_itm_memory']),
                        int(stats['vb_active_itm_memory_uncompressed']))
def do_setWithMeta_twice(self):
    """Call setWithMeta() twice for the same key and check that the second
    (failing) call does not leave a temp item: curr_temp_items must not
    be 1 afterwards.
    """
    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    mc.bucket_select('default')
    # First setWithMeta() must succeed; any MemcachedError fails the test.
    try:
        mc.setWithMeta('1', '{"Hello":"World"}', 3600, 0, 1,
                       0x1512a3186faa0000)
    except MemcachedError as error:
        # NOTE(review): Python 3 exceptions have no ``.message`` attribute —
        # confirm MemcachedError defines it (client code commonly uses .msg).
        self.log.info("<MemcachedError #%d ``%s''>" % (error.status,
                                                       error.message))
        self.fail("Error on First setWithMeta()")
    stats = mc.stats()
    self.log.info('curr_items: {} and curr_temp_items:{}'.format(
        stats['curr_items'], stats['curr_temp_items']))
    # Wait briefly so any temporary item created by the store can be
    # cleaned up, then snapshot the counters again.
    self.log.info("Sleeping for 5 and checking stats again")
    time.sleep(5)
    stats = mc.stats()
    self.log.info('curr_items: {} and curr_temp_items:{}'.format(
        stats['curr_items'], stats['curr_temp_items']))
    # Second setWithMeta() on the same key: an error is tolerated, but
    # it must not leave curr_temp_items at 1.
    try:
        mc.setWithMeta('1', '{"Hello":"World"}', 3600, 0, 1,
                       0x1512a3186faa0000)
    except MemcachedError as error:
        stats = mc.stats()
        self.log.info(
            'After 2nd setWithMeta(), curr_items: {} and curr_temp_items:{}'
            .format(stats['curr_items'], stats['curr_temp_items']))
        if int(stats['curr_temp_items']) == 1:
            self.fail(
                "Error on second setWithMeta(), expected curr_temp_items to be 0"
            )
        else:
            self.log.info("<MemcachedError #%d ``%s''>" % (error.status,
                                                           error.message))
def test_large_doc_size_2MB(self):
    """Load many large (>= 1 MB, see MB-29333) documents and verify all
    of them were stored.

    With a 256 MB bucket the test starts failing once ~236 MB is used
    ("Memcached Error 134" when memory runs out in a dgm run).
    Example invocation:
    epengine.basic_ops.basic_ops.test_large_doc_size_2MB,skip_cleanup=True,document_size=2048000,dgm_run=True
    """
    docs_per_day = 5
    document_size = self.input.param('document_size')
    # The generator yields 25 docs per "day", so the expected item count
    # is derived from docs_per_day rather than hard-coded.
    expected_items = 25 * docs_per_day
    gens_load = self.generate_docs_bigdata(docs_per_day=expected_items,
                                           document_size=document_size)
    self.load(gens_load, buckets=self.src_bucket, verify_data=False,
              batch_size=10)
    # All documents must be resident; a shortfall means some stores
    # failed (e.g. "Memcached Error 134").
    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain(self.master.rest_username, self.master.rest_password)
    mc.bucket_select('default')
    stats = mc.stats()
    self.assertEqual(int(stats['curr_items']), expected_items)