def test_vbucket_id_option(self):
    bucket = RestConnection(self.server_origin).get_bucket(self.buckets[0])
    self.num_items = self.num_items - (self.num_items % len(bucket.vbuckets))
    num_items_per_vb = self.num_items // len(bucket.vbuckets)
    template = '{{ "mutated" : 0, "age": {0}, "first_name": "{1}" }}'
    gen_load = DocumentGenerator('cbtransfer', template, list(range(5)), ['james', 'john'],
                                 start=0, end=self.num_items)
    client = MemcachedClient(self.server_origin.ip,
                             int(bucket.vbuckets[0].master.split(':')[1]))
    kv_value_dict = {}
    vb_id_to_check = bucket.vbuckets[-1].id
    for vb_id in range(len(bucket.vbuckets)):
        cur_items_per_vb = 0
        while cur_items_per_vb < num_items_per_vb:
            key, value = next(gen_load)
            client.set(key, 0, 0, value, vb_id)
            if vb_id_to_check == vb_id:
                kv_value_dict[key] = value
            cur_items_per_vb += 1

    transfer_source = 'http://%s:%s' % (self.server_origin.ip, self.server_origin.port)
    transfer_destination = 'http://%s:%s' % (self.server_recovery.ip, self.server_recovery.port)
    output = self.shell.execute_cbtransfer(transfer_source, transfer_destination,
                                           "-b %s -B %s -i %s" % (bucket.name, bucket.name, vb_id_to_check))

    client = MemcachedClient(self.server_recovery.ip,
                             int(bucket.vbuckets[0].master.split(':')[1]))
    for key, value in kv_value_dict.items():
        _, _, d = client.get(key, vbucket=vb_id_to_check)
        self.assertEqual(d, value,
                         'Key: %s expected. Value expected %s. Value actual %s' % (key, value, d))
class ComplianceTest(unittest.TestCase):

    def setUp(self):
        self.mc = MemcachedClient()
        self.mc.flush()

    def tearDown(self):
        self.mc.flush()
        self.mc.close()

    def testVersion(self):
        """Test the version command returns something."""
        v = self.mc.version()
        self.assertTrue(len(v) > 0, "Bad version: ``" + str(v) + "''")

    def testSimpleSetGet(self):
        """Test a simple set and get."""
        self.mc.set("x", 5, 19, "somevalue")
        self.assertGet((19, "somevalue"), self.mc.get("x"))

    def testZeroExpiration(self):
        """Ensure zero-expiration sets work properly."""
        self.mc.set("x", 0, 19, "somevalue")
        time.sleep(1.1)
        self.assertGet((19, "somevalue"), self.mc.get("x"))

    def assertNotExists(self, key):
        try:
            x = self.mc.get(key)
            self.fail("Expected an exception, got " + repr(x))
        except MemcachedError, e:
            self.assertEquals(memcacheConstants.ERR_NOT_FOUND, e.status)
def test_memecached_basic_api(self):
    # epengine.basic_collections.basic_collections.test_memecached_basic_api
    scope_name = "ScopeWith30CharactersinName123"
    Collection_name = "CollectionsWithLargeNamechecki"
    self.rest.create_scope(scope=scope_name)
    self.rest.create_collection(scope=scope_name, collection=Collection_name,
                                bucket=self.default_bucket_name)
    collection = scope_name + "." + Collection_name
    self.log.info("collection name is {}".format(collection))
    self.sleep(10)

    # create memcached client
    mc = MemcachedClient(self.master.ip, 11210)
    mc.sasl_auth_plain(self.master.rest_username, self.master.rest_password)

    # enable collections and fetch the collections manifest
    mc.enable_collections()
    mc.bucket_select('default')
    # mc.hello(memcacheConstants.FEATURE_COLLECTIONS)
    mc.hello("set_collection")
    mc.get_collections(True)
    self.log.info("get collections completed")

    try:
        mc.set("key", 0, 0, "value", collection=collection)
        flag, keyx, value = mc.get(key="key", collection=collection)
        print("flag:{} keyx:{} value:{}".format(flag, keyx, value))
    except MemcachedError as exp:
        self.fail("Exception with setting and getting the key in collections {0}".format(exp))
def load_one_mutation_into_source_vb0(self, vb0_active_src_node):
    key = self.vb0_keys[self.key_counter]
    memc_client = MemcachedClient(vb0_active_src_node.ip, 11210)
    try:
        memc_client.set(key, exp=0, flags=0, val="dummy val")
        self.key_counter += 1
        self.keys_loaded.append(key)
        self.log.info("Loaded key {} onto vb0 in {}".format(key, vb0_active_src_node.ip))
        self.log.info("deleted, flags, exp, rev_id, cas for key {} = {}".format(key, memc_client.getMeta(key)))
    except MemcachedError as e:
        self.log.error(e)
def load_one_mutation_into_source_vb0(self, vb0_active_src_node):
    key = self.vb0_keys[self.key_counter]
    memc_client = MemcachedClient(vb0_active_src_node.ip, 11210)
    memc_client.sasl_auth_plain("cbadminbucket", "password")
    memc_client.bucket_select("default")
    try:
        memc_client.set(key, exp=0, flags=0, val="dummy val")
        self.key_counter += 1
        self.keys_loaded.append(key)
        self.log.info("Loaded key {0} onto vb0 in {1}".format(key, vb0_active_src_node.ip))
        self.log.info("deleted, flags, exp, rev_id, cas for key {0} = {1}".format(key, memc_client.getMeta(key)))
    except MemcachedError as e:
        self.log.error(e)
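# The two loader variants above differ only in whether they authenticate and
# select a bucket before writing. Below is a minimal standalone sketch of that
# connect / auth / bucket-select / set / get flow; the import path, host,
# credentials and bucket name are assumptions (placeholders), not values taken
# from the suite.
from mc_bin_client import MemcachedClient, MemcachedError

client = MemcachedClient("127.0.0.1", 11210)          # assumed local data node
client.sasl_auth_plain("cbadminbucket", "password")   # placeholder credentials
client.bucket_select("default")                       # placeholder bucket
try:
    client.set("demo-key", 0, 0, "dummy val")         # set(key, exp, flags, value)
    flags, cas, value = client.get("demo-key")        # get() returns a 3-tuple ending in the value
    print("flags={} cas={} value={}".format(flags, cas, value))
except MemcachedError as err:
    print("memcached error: {}".format(err))
finally:
    client.close()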
mc = MemcachedClient("10.17.12.20", 11210)
#for key in keys:
#    vam.memcached(key).set(key, 1, 0, payload)
#    total_size += len(key) + len(payload) + 200
#time.sleep(10)
#for i in range(0, 1023):
#    mc.set_vbucket_state(i, 'active')

new_thread = TapListener(queue, server)
new_thread.start()

i = 0
while i < 4000:
    for key in keys:
        # vam.memcached(key).get(key)
        mc.set(key, 10, 0, payload, vbucket=0)
        # for key in keys:
        #     vam.memcached(key).get(key)
        #     mc.set(key, 1, 0, payload, vbucket=0)
        try:
            a, b, c = mc.get(key, vbucket=0)
            # print c
        except:
            pass
    i += 1
    # print i

#for key in keys:
#    vam.memcached(key).get(key)
def poll():
    url = "https://macbuild.hq.couchbase.com/xcode/api/integrations/filter/latest"
    latest = getJS(url)

    now = datetime.datetime.now()
    ts = now.strftime("%Y%m%d")
    build_no = "%s-%s" % (MOBILE_VERSION, ts[-5:])

    client = McdClient(HOST, PORT)
    client.sasl_auth_plain("mobile", "")
    buildHist = {}

    for build in latest:
        if 'revisionBlueprint' not in latest[build]:
            continue
        key = latest[build]['revisionBlueprint']['DVTSourceControlWorkspaceBlueprintPrimaryRemoteRepositoryKey']
        rev = latest[build]['revisionBlueprint']['DVTSourceControlWorkspaceBlueprintLocationsKey'][key]['DVTSourceControlLocationRevisionKey']
        name = "iOS-" + latest[build]['bot']['name'].replace(" ", "")
        build_id = latest[build]['number']
        results = latest[build]['buildResultSummary']
        totalCount = results['testsCount']
        failCount = results['errorCount']
        result = 'SUCCESS'

        if name in TOTAL_COUNT_LOG:
            if totalCount == 0:
                # use historical value for total count
                totalCount = TOTAL_COUNT_LOG[name]
            elif totalCount != TOTAL_COUNT_LOG[name]:
                # update total count log
                TOTAL_COUNT_LOG[name] = totalCount
        else:
            TOTAL_COUNT_LOG[name] = totalCount

        if failCount > 0:
            result = 'UNSTABLE'

        component = None
        for feature in MOBILE_FEATURES:
            tag, _c = feature.split("-")
            docname = name.upper()
            docname = docname.replace("-", "_")
            if tag in docname:
                component = _c

        if component:
            doc = {'build_id': build_id,
                   'priority': 'P0',
                   'name': name,
                   'url': url,
                   'component': component,
                   'failCount': failCount,
                   'totalCount': totalCount,
                   'result': result,
                   'os': 'iOS',
                   'build': build_no}
            key = "%s-%s" % (doc["name"], doc["build_id"])
            val = json.dumps(doc)
            try:
                key = hashlib.md5(key).hexdigest()
                print val
                client.set(key, 0, 0, val, 0)
                buildHist[doc["build"]] = doc["build_id"]
            except Exception as ex:
                print ex
                print "set failed, couchbase down?: %s:%s" % (HOST, PORT)
def storeJob(jobDoc, bucket, first_pass=True):
    client = McdClient(HOST, PORT)
    client.sasl_auth_plain(bucket, "")

    doc = jobDoc
    url = doc["url"]
    res = getJS(url, {"depth": 0}).json()
    if res is None:
        return

    buildHist = {}
    if res["lastBuild"]:
        bids = [b["number"] for b in res["builds"]]
        lastTotalCount = -1
        idx = 0
        for bid in bids:
            idx = idx + 1
            i = 1
            if idx < len(bids):
                while bids[idx] != bid - i:
                    key = "%s-%s" % (doc["name"], bid - i)
                    key = hashlib.md5(key).hexdigest()
                    try:
                        client.delete(key, vbucket=0)
                    except:
                        pass
                    i = i + 1

            if bid in JOBS[doc["name"]]:
                continue  # job already stored
            else:
                if first_pass == False:
                    JOBS[doc["name"]].append(bid)

            doc["build_id"] = bid
            res = getJS(url + str(bid), {"depth": 0}).json()
            if res is None:
                return
            if "result" not in res:
                continue

            doc["result"] = res["result"]
            doc["duration"] = res["duration"]

            if bucket == "server":
                if res["result"] not in ["SUCCESS", "UNSTABLE", "FAILURE", "ABORTED"]:
                    continue  # unknown result state

                actions = res["actions"]
                totalCount = getAction(actions, "totalCount") or 0
                failCount = getAction(actions, "failCount") or 0
                skipCount = getAction(actions, "skipCount") or 0
                if totalCount == 0:
                    if lastTotalCount == -1:
                        continue  # no tests ever passed for this build
                    else:
                        totalCount = lastTotalCount
                        failCount = totalCount
                else:
                    lastTotalCount = totalCount

                doc["failCount"] = failCount
                doc["totalCount"] = totalCount - skipCount

                params = getAction(actions, "parameters")
                if params is None:
                    doc["priority"] = P1
                    doc["build"] = DEFAULT_BUILD
                else:
                    doc["build"] = getAction(params, "name", "version_number") or \
                        getAction(params, "name", "cluster_version") or DEFAULT_BUILD
                    doc["priority"] = getAction(params, "name", "priority") or P1
                    if doc["priority"].upper() not in [P0, P1, P2]:
                        doc["priority"] = P1

                doc["build"] = doc["build"].replace("-rel", "").split(",")[0]
                try:
                    _build = doc["build"].split("-")
                    rel, bno = _build[0], _build[1]

                    # check partial rel #'s
                    rlen = len(rel.split("."))
                    while rlen < 3:
                        rel = rel + ".0"
                        rlen += 1

                    # verify rel, build
                    m = re.match("^\d\.\d\.\d{1,5}", rel)
                    if m is None:
                        print "unsupported version_number: " + doc["build"]
                        continue
                    m = re.match("^\d{1,10}", bno)
                    if m is None:
                        print "unsupported version_number: " + doc["build"]
                        continue

                    doc["build"] = "%s-%s" % (rel, bno.zfill(4))
                except:
                    print "unsupported version_number: " + doc["build"]
                    continue
            else:
                # use date as version for sdk and mobile
                if res["result"] not in ["SUCCESS", "UNSTABLE", "FAILURE", "ABORTED"]:
                    continue

                actions = res["actions"]
                totalCount = getAction(actions, "totalCount") or 0
                failCount = getAction(actions, "failCount") or 0
                skipCount = getAction(actions, "skipCount") or 0
                if totalCount == 0:
                    if lastTotalCount == -1:
                        continue  # no tests ever passed for this build
                    else:
                        totalCount = lastTotalCount
                        failCount = totalCount
                else:
                    lastTotalCount = totalCount

                doc["failCount"] = failCount
                doc["totalCount"] = totalCount - skipCount
                doc["priority"] = P0

                ts = res["timestamp"] / 1000
                month = int(datetime.datetime.fromtimestamp(ts).strftime("%m"))
                _ts = datetime.datetime.fromtimestamp(ts).strftime("%Y-%m%d")
                yr, md = _ts.split("-")
                doc["build"] = "%s-%s%s" % (MOBILE_VERSION, yr[-1], md)

            if doc["build"] in buildHist:
                #print "REJECTED- doc already in build results: %s" % doc
                #print buildHist
                # attempt to delete if this record has been stored in couchbase
                try:
                    oldKey = "%s-%s" % (doc["name"], doc["build_id"])
                    oldKey = hashlib.md5(oldKey).hexdigest()
                    client.delete(oldKey, vbucket=0)
                    #print "DELETED- %s:%s" % (doc["build"], doc["build_id"])
                except:
                    pass
                continue  # already have this build results

            key = "%s-%s" % (doc["name"], doc["build_id"])
            key = hashlib.md5(key).hexdigest()
            val = json.dumps(doc)
            try:
                client.set(key, 0, 0, val, 0)
                buildHist[doc["build"]] = doc["build_id"]
            except:
                print "set failed, couchbase down?: %s:%s" % (HOST, PORT)

    if first_pass:
        storeJob(jobDoc, bucket, first_pass=False)
help="number of items") options, args = parser.parse_args() node = options.node sleep_time = int(options.sleep) prefix = str(uuid.uuid4()) number_of_items = int(options.items) mc = MemcachedClient("127.0.0.1", 11211) keys = ["{0}-{1}".format(prefix, i) for i in range(0, number_of_items)] info("inserting {0} items".format(number_of_items)) for k in keys: mc.set(k, 0, 0, str(uuid.uuid4())[0:16]) while True: info("now remove 3 chars from 80% of keys - if < 3 chars delete the key - if key does not exist create it") for i in range(0, 3): for k in keys: try: a, b, value = mc.get(k) if len(value) < 3: mc.delete(k) else: mc.set(k, 0, 0, value[0:len(value) - 7]) except: mc.set(k, 0, 0, str(uuid.uuid4())[0:16]) time.sleep(sleep_time)
help="number of items") options, args = parser.parse_args() node = options.node sleep_time = int(options.sleep) prefix = str(uuid.uuid4()) number_of_items = int(options.items) mc = MemcachedClient("127.0.0.1", 11211) keys = ["{0}-{1}".format(prefix, i) for i in range(0, number_of_items)] info("inserting {0} items".format(number_of_items)) for k in keys: mc.set(k, 0, 0, str(uuid.uuid4())[0:16]) while True: info( "now remove 3 chars from 80% of keys - if < 3 chars delete the key - if key does not exist create it" ) for i in range(0, 3): for k in keys: try: a, b, value = mc.get(k) if len(value) < 3: mc.delete(k) else: mc.set(k, 0, 0, value[0:len(value) - 7]) except: mc.set(k, 0, 0, str(uuid.uuid4())[0:16])