def check_dataloss(self, server, bucket):
    """Validate that keys 0..self.num_items-1 are readable from `bucket`.

    Fetches keys in batches of 10000 via the Python SDK; for any batch
    whose multi-get fails, retries each key individually and records the
    missing ones together with their vBucket id.

    :param server: server whose ip is used for the SDK connection.
    :param bucket: bucket object (``bucket.name`` is used).
    :return: list of "Missing key: ..." error strings (empty = no loss).
    """
    from couchbase.bucket import Bucket
    from couchbase.exceptions import NotFoundError
    from lib.memcached.helper.data_helper import VBucketAwareMemcached
    # NOTE(review): "******" looks like a redacted placeholder, not a real
    # credential -- confirm the actual username/password source.
    bkt = Bucket('couchbase://{0}/{1}'.format(server.ip, bucket.name),
                 username="******", password="******")
    rest = RestConnection(self.master)
    VBucketAware = VBucketAwareMemcached(rest, bucket.name)
    _, _, _ = VBucketAware.request_map(rest, bucket.name)
    batch_start = 0
    batch_size = 10000
    errors = []
    while batch_start < self.num_items:
        # Clamp the final batch so keys that were never loaded are not
        # probed and reported as false "missing" keys.
        batch_end = min(batch_start + batch_size, self.num_items)
        keys = [str(i).rjust(20, '0')
                for i in xrange(batch_start, batch_end)]
        try:
            bkt.get_multi(keys)
            self.log.info("Able to fetch keys starting from {0} to {1}"
                          .format(keys[0], keys[-1]))
        except Exception as e:
            self.log.error(e)
            self.log.info("Now trying keys in the batch one at a time...")
            for key in keys:
                # Per-key try/except so one missing key does not abort
                # the scan of the rest of the batch (previously only the
                # first NotFoundError per batch was recorded).
                try:
                    bkt.get(key)
                except NotFoundError:
                    vBucketId = VBucketAware._get_vBucket_id(key)
                    errors.append("Missing key: {0}, VBucketId: {1}".
                                  format(key, vBucketId))
        batch_start += batch_size
    return errors
def check_dataloss(self, server, bucket):
    """Validate that keys 0..self.num_items-1 are readable from `bucket`.

    Connects without credentials (pre-RBAC style), fetches keys in
    batches of 10000, and for any failed multi-get retries each key
    individually, recording genuinely missing keys with their vBucket id.

    :param server: server whose ip is used for the SDK connection.
    :param bucket: bucket object (``bucket.name`` is used).
    :return: list of "Missing key: ..." error strings (empty = no loss).
    """
    from couchbase.bucket import Bucket
    from couchbase.exceptions import NotFoundError
    from lib.memcached.helper.data_helper import VBucketAwareMemcached
    bkt = Bucket('couchbase://{0}/{1}'.format(server.ip, bucket.name))
    rest = RestConnection(self.master)
    VBucketAware = VBucketAwareMemcached(rest, bucket.name)
    _, _, _ = VBucketAware.request_map(rest, bucket.name)
    batch_start = 0
    batch_size = 10000
    errors = []
    while batch_start < self.num_items:
        # Clamp the final batch so keys that were never loaded are not
        # probed and reported as false "missing" keys.
        batch_end = min(batch_start + batch_size, self.num_items)
        keys = [str(i).rjust(20, '0')
                for i in xrange(batch_start, batch_end)]
        try:
            bkt.get_multi(keys)
            self.log.info("Able to fetch keys starting from {0} to {1}"
                          .format(keys[0], keys[-1]))
        except Exception as e:
            self.log.error(e)
            self.log.info("Now trying keys in the batch one at a time...")
            for key in keys:
                # Per-key try/except so one missing key does not abort
                # the scan of the rest of the batch (previously only the
                # first NotFoundError per batch was recorded).
                try:
                    bkt.get(key)
                except NotFoundError:
                    vBucketId = VBucketAware._get_vBucket_id(key)
                    errors.append("Missing key: {0}, VBucketId: {1}".
                                  format(key, vBucketId))
        batch_start += batch_size
    return errors
def check_dataloss_for_high_ops_loader(self, server, bucket, items,
                                       batch=20000, threads=5,
                                       start_document=0, updated=False,
                                       ops=0):
    """Run the external high-ops doc loader in validation mode and
    collect the missing / mismatched keys it reports.

    :param server: node to run against (ip + REST credentials used).
    :param bucket: bucket object (``bucket.name`` is used).
    :param items: expected document count passed to the loader.
    :param batch: loader batch size.
    :param threads: loader thread count.
    :param start_document: first document index to validate.
    :param updated: when True, pass ``--updated`` (validate mutated docs).
    :param ops: ops count forwarded together with ``--updated``.
    :return: list of error strings, one per missing/mismatched key.
    """
    import subprocess
    from lib.memcached.helper.data_helper import VBucketAwareMemcached
    cmd_format = "python scripts/high_ops_doc_loader.py --node {0} --bucket {1} --user {2} --password {3} " \
                 "--count {4} " \
                 "--batch_size {5} --threads {6} --start_document {7} --cb_version {8} --validate"
    cb_version = RestConnection(server).get_nodes_version()[:3]
    if updated:
        cmd_format = "{} --updated --ops {}".format(cmd_format, int(ops))
    cmd = cmd_format.format(server.ip, bucket.name, server.rest_username,
                            server.rest_password, int(items), batch,
                            threads, start_document, cb_version)
    self.log.info("Running {}".format(cmd))
    result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
    # communicate() drains stdout and stderr concurrently; the previous
    # sequential .read() calls can deadlock once the child fills the
    # stderr pipe buffer while we are still blocked reading stdout.
    output, error = result.communicate()
    errors = []
    rest = RestConnection(self.master)
    VBucketAware = VBucketAwareMemcached(rest, bucket.name)
    _, _, _ = VBucketAware.request_map(rest, bucket.name)
    if error:
        self.log.error(error)
        self.fail("Failed to run the loadgen validator.")

    def _parse_keys(line):
        # Loader prints e.g. "Missing keys: ['k1', 'k2']"; strip the
        # list punctuation/quoting and return the bare key strings.
        raw = line.split(":")[1].strip().replace('[', '').replace(']', '')
        return [k.strip().replace('\'', '').replace('\\', '')
                for k in raw.split(',')]

    if output:
        for load in output.split('\n')[:-1]:
            if "Missing keys:" in load:
                for key in _parse_keys(load):
                    vBucketId = VBucketAware._get_vBucket_id(key)
                    errors.append(
                        "Missing key: {0}, VBucketId: {1}".format(
                            key, vBucketId))
            if "Mismatch keys: " in load:
                for key in _parse_keys(load):
                    vBucketId = VBucketAware._get_vBucket_id(key)
                    errors.append(
                        "Wrong value for key: {0}, VBucketId: {1}".format(
                            key, vBucketId))
    return errors
def check_dataloss(self, server, bucket, num_items):
    """Validate that keys 0..num_items-1 are readable from `bucket`.

    Fetches keys in batches of 10000 via the Python SDK; keys that a
    multi-get reports as failed are retried individually and recorded
    with their vBucket id when genuinely missing.

    :param server: server used for the SDK connection and version probe.
    :param bucket: bucket object (``bucket.name`` is used).
    :param num_items: number of documents expected in the bucket.
    :return: list of "Missing key: ..." error strings (empty = no loss).
    """
    from couchbase.bucket import Bucket
    from couchbase.exceptions import NotFoundError, CouchbaseError
    from lib.memcached.helper.data_helper import VBucketAwareMemcached
    self.log.info(
        "########## validating data for bucket : {} ###########".format(
            bucket))
    # Pre-5.0 servers have no RBAC, so connect without credentials.
    # (The original had a harmless duplicated assignment here.)
    cb_version = RestConnection(server).get_nodes_version()[:3]
    if cb_version < "5":
        bkt = Bucket('couchbase://{0}/{1}'.format(server.ip, bucket.name),
                     timeout=5000)
    else:
        bkt = Bucket('couchbase://{0}/{1}'.format(server.ip, bucket.name),
                     username=server.rest_username,
                     password=server.rest_password, timeout=5000)
    rest = RestConnection(self.master)
    VBucketAware = VBucketAwareMemcached(rest, bucket.name)
    _, _, _ = VBucketAware.request_map(rest, bucket.name)
    batch_start = 0
    batch_size = 10000
    errors = []
    while batch_start < num_items:
        # Clamp the final batch so keys that were never loaded are not
        # probed and reported as false "missing" keys.
        batch_end = min(batch_start + batch_size, num_items)
        keys = [str(i).rjust(20, '0')
                for i in xrange(batch_start, batch_end)]
        try:
            bkt.get_multi(keys)
            self.log.info(
                "Able to fetch keys starting from {0} to {1}".format(
                    keys[0], keys[-1]))
        except CouchbaseError as e:
            self.log.error(e)
            # split_results() separates keys that succeeded from the
            # ones that failed inside the multi-get.
            ok, fail = e.split_results()
            if fail:
                for key in fail:
                    try:
                        bkt.get(key)
                    except NotFoundError:
                        vBucketId = VBucketAware._get_vBucket_id(key)
                        errors.append(
                            "Missing key: {0}, VBucketId: {1}".format(
                                key, vBucketId))
        batch_start += batch_size
    self.log.info("Total missing keys:{}".format(len(errors)))
    self.log.info(errors)
    return errors
def check_dataloss(self, server, bucket, num_items):
    """Validate that keys 0..num_items-1 are readable from `bucket`.

    Batched multi-get validation: any batch the SDK reports failures for
    is re-checked key by key, and genuinely missing keys are recorded
    together with their vBucket id.

    :param server: server used for the SDK connection and version probe.
    :param bucket: bucket object (``bucket.name`` is used).
    :param num_items: number of documents expected in the bucket.
    :return: list of "Missing key: ..." error strings (empty = no loss).
    """
    from couchbase.bucket import Bucket
    from couchbase.exceptions import NotFoundError, CouchbaseError
    from lib.memcached.helper.data_helper import VBucketAwareMemcached
    self.log.info(
        "########## validating data for bucket : {} ###########".format(
            bucket))
    # Pre-5.0 servers have no RBAC, so connect without credentials.
    # (The original had a harmless duplicated assignment here.)
    cb_version = RestConnection(server).get_nodes_version()[:3]
    if cb_version < "5":
        bkt = Bucket('couchbase://{0}/{1}'.format(server.ip, bucket.name),
                     timeout=5000)
    else:
        bkt = Bucket('couchbase://{0}/{1}'.format(server.ip, bucket.name),
                     username=server.rest_username,
                     password=server.rest_password, timeout=5000)
    rest = RestConnection(self.master)
    VBucketAware = VBucketAwareMemcached(rest, bucket.name)
    _, _, _ = VBucketAware.request_map(rest, bucket.name)
    batch_start = 0
    batch_size = 10000
    errors = []
    while batch_start < num_items:
        # Clamp the final batch: probing keys that were never loaded
        # would report them as false "missing" keys.
        batch_end = min(batch_start + batch_size, num_items)
        keys = [str(i).rjust(20, '0')
                for i in xrange(batch_start, batch_end)]
        try:
            bkt.get_multi(keys)
            self.log.info("Able to fetch keys starting from {0} to {1}"
                          .format(keys[0], keys[-1]))
        except CouchbaseError as e:
            self.log.error(e)
            # split_results() separates keys that succeeded from the
            # ones that failed inside the multi-get.
            ok, fail = e.split_results()
            if fail:
                for key in fail:
                    try:
                        bkt.get(key)
                    except NotFoundError:
                        vBucketId = VBucketAware._get_vBucket_id(key)
                        errors.append("Missing key: {0}, VBucketId: {1}".
                                      format(key, vBucketId))
        batch_start += batch_size
    self.log.info("Total missing keys:{}".format(len(errors)))
    self.log.info(errors)
    return errors
def xdcr_lag_stats(self, interval=5):
    """Sample XDCR replication lag until the task is aborted, then dump
    all collected samples to a timestamped JSON file.

    :param interval: seconds to sleep between samples.
    """
    src_client = VBucketAwareMemcached(
        RestConnection(self.clusters[0][0]), self.bucket)
    dst_client = VBucketAwareMemcached(
        RestConnection(self.clusters[1][0]), self.bucket)
    log.info("started xdcr lag measurements")
    samples = list()
    self._task["xdcr_lag"] = samples
    while not self._aborted():
        # Merge single- and multi-key latency readings into one sample;
        # single-key values take precedence on any shared keys.
        single_stats = self._get_xdcr_latency(src_client, dst_client)
        merged = self._get_xdcr_latency(src_client, dst_client, True)
        merged.update(single_stats)
        samples.append(merged)
        time.sleep(interval)
    filename = time.strftime("%Y%m%d_%H%M%S_xdcr_lag.json",
                             time.localtime())
    with open(filename, "w") as fh:
        fh.write(json.dumps(self._task['xdcr_lag'],
                            indent=4, sort_keys=True))
    log.info("finished xdcr lag measurements")
def check_dataloss_for_high_ops_loader(self, server, bucket, items,
                                       batch=20000, threads=5,
                                       start_document=0, updated=False,
                                       ops=0):
    """Run the external high-ops doc loader in validation mode and
    collect the missing / mismatched keys it reports.

    :param server: node to run against (ip + REST credentials used).
    :param bucket: bucket object (``bucket.name`` is used).
    :param items: expected document count passed to the loader.
    :param batch: loader batch size.
    :param threads: loader thread count.
    :param start_document: first document index to validate.
    :param updated: when True, pass ``--updated`` (validate mutated docs).
    :param ops: ops count forwarded together with ``--updated``.
    :return: list of error strings, one per missing/mismatched key.
    """
    import subprocess
    from lib.memcached.helper.data_helper import VBucketAwareMemcached
    cmd_format = "python scripts/high_ops_doc_loader.py --node {0} --bucket {1} --user {2} --password {3} " \
                 "--count {4} " \
                 "--batch_size {5} --threads {6} --start_document {7} --cb_version {8} --validate"
    cb_version = RestConnection(server).get_nodes_version()[:3]
    if updated:
        cmd_format = "{} --updated --ops {}".format(cmd_format, int(ops))
    cmd = cmd_format.format(server.ip, bucket.name, server.rest_username,
                            server.rest_password, int(items), batch,
                            threads, start_document, cb_version)
    self.log.info("Running {}".format(cmd))
    result = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
    # communicate() drains stdout and stderr concurrently; the previous
    # sequential .read() calls can deadlock once the child fills the
    # stderr pipe buffer while we are still blocked reading stdout.
    output, error = result.communicate()
    errors = []
    rest = RestConnection(self.master)
    VBucketAware = VBucketAwareMemcached(rest, bucket.name)
    _, _, _ = VBucketAware.request_map(rest, bucket.name)
    if error:
        self.log.error(error)
        self.fail("Failed to run the loadgen validator.")

    def _parse_keys(line):
        # Loader prints e.g. "Missing keys: ['k1', 'k2']"; strip the
        # list punctuation/quoting and return the bare key strings.
        raw = line.split(":")[1].strip().replace('[', '').replace(']', '')
        return [k.strip().replace('\'', '').replace('\\', '')
                for k in raw.split(',')]

    if output:
        for load in output.split('\n')[:-1]:
            if "Missing keys:" in load:
                for key in _parse_keys(load):
                    vBucketId = VBucketAware._get_vBucket_id(key)
                    errors.append(
                        "Missing key: {0}, VBucketId: {1}".format(
                            key, vBucketId))
            if "Mismatch keys: " in load:
                for key in _parse_keys(load):
                    vBucketId = VBucketAware._get_vBucket_id(key)
                    errors.append(
                        "Wrong value for key: {0}, VBucketId: {1}".format(
                            key, vBucketId))
    return errors