コード例 #1
0
    def key_not_exists_test(self):
        """Verify a freshly deleted key is gone and that a CAS op on it
        raises NotFoundError.

        Repeats set/remove/get/cas many times to try to reproduce a bug
        where a deleted key could still be read back.
        """
        client = SDKClient(hosts=[self.master.ip], bucket="default")
        KEY_NAME = 'key'

        for _ in range(1500):
            client.set(KEY_NAME, "x")
            # For some reason could not get delete to work, so use remove()
            client.remove(KEY_NAME)
            rc = client.get(KEY_NAME)
            # .get is automatically set to quiet for the sdk_client, so a
            # None value signals the expected miss; otherwise the sdk_client
            # spends ~10 seconds retrying and is very slow.
            assert rc[2] is None, "deleted key {0} is still readable".format(KEY_NAME)
            # A CAS error does not stall the test for 10 seconds, and it lets
            # us check that the correct error is thrown.
            try:
                # For some reason replace() instead of cas() would not
                # reproduce the bug.
                client.cas(KEY_NAME, "value", cas=10)
            except NotFoundError:
                pass
コード例 #2
0
ファイル: lww_stats.py プロジェクト: membase/testrunner
    def test_poisoned_cas(self):
        """Verify a CAS poisoned by a clock jump can be reset with cbepctl.

        Steps:
        - set the clock ahead so a new mutation gets a poisoned (too-large) CAS
        - do lots of sets so every vbucket's max CAS is poisoned
        - set the clock back and verify the CAS is still big on new sets
        - reset max_cas on every vbucket and verify via vbucket-details stats
        - do new mutations and verify their CAS is smaller
        """

        self.log.info("starting test_poisoned_cas")

        sdk_client = SDKClient(scheme="couchbase", hosts=[self.servers[0].ip], bucket=self.buckets[0].name)
        mc_client = MemcachedClientHelper.direct_client(self.servers[0], self.buckets[0])

        # move the system clock ahead to poison the CAS
        shell = RemoteMachineShellConnection(self.servers[0])
        self.assertTrue(shell.change_system_time(LWWStatsTests.ONE_HOUR_IN_SECONDS), "Failed to advance the clock")

        output, error = shell.execute_command("date")
        self.log.info("Date after is set forward {0}".format(output))

        sdk_client.set("key1", "val1")
        rc = mc_client.get("key1")
        poisoned_cas = rc[1]  # mc_client.get returns (flags, cas, value)
        self.log.info("The poisoned CAS is {0}".format(poisoned_cas))

        # do lots of mutations to set the max CAS for all vbuckets
        gen_load = BlobGenerator("key-for-cas-test", "value-for-cas-test-", self.value_size, end=10000)
        self._load_all_buckets(self.master, gen_load, "create", 0)

        # move the clock back again and verify the CAS stays large
        self.assertTrue(shell.change_system_time(-LWWStatsTests.ONE_HOUR_IN_SECONDS), "Failed to change the clock")
        output, error = shell.execute_command("date")
        self.log.info("Date after is set backwards {0}".format(output))

        use_mc_bin_client = self.input.param("use_mc_bin_client", False)

        if use_mc_bin_client:
            rc = mc_client.set("key2", 0, 0, "val2")
            second_poisoned_cas = rc[1]
        else:
            rc = sdk_client.set("key2", "val2")
            second_poisoned_cas = rc.cas
        self.log.info("The second_poisoned CAS is {0}".format(second_poisoned_cas))
        self.assertTrue(
            second_poisoned_cas > poisoned_cas,
            "Second poisoned CAS {0} is not larger than the first poisoned cas {1}".format(
                second_poisoned_cas, poisoned_cas
            ),
        )

        # reset the CAS for all vbuckets. This needs to be done in conjunction with a clock change. If the clock is not
        # changed then the CAS will immediately continue with the clock. I see two scenarios:
        # 1. Set the clock back 1 hour and the CAS back 30 minutes, the CAS should be used
        # 2. Set the clock back 1 hour, set the CAS back 2 hours, the clock should be used

        # do case 1, set the CAS back 30 minutes.  Calculation below assumes the CAS is in nanoseconds
        earlier_max_cas = poisoned_cas - 30 * 60 * 1000000000
        for i in range(self.vbuckets):
            output, error = shell.execute_cbepctl(
                self.buckets[0], "", "set_vbucket_param", "max_cas ", str(i) + " " + str(earlier_max_cas)
            )
            if len(error) > 0:
                self.fail("Failed to set the max cas")

        # verify the max CAS took effect on every vbucket
        for i in range(self.vbuckets):
            max_cas = int(mc_client.stats("vbucket-details")["vb_" + str(i) + ":max_cas"])
            self.assertTrue(
                max_cas == earlier_max_cas,
                "Max CAS not properly set for vbucket {0} set as {1} and observed {2}".format(
                    i, earlier_max_cas, max_cas
                ),
            )
            self.log.info("Per cbstats the max cas for bucket {0} is {1}".format(i, max_cas))

        sdk_client.set("key-after-resetting cas", "val1")
        rc2 = mc_client.get("key-after-resetting cas")
        set_cas_after_reset_max_cas = rc2[1]
        self.log.info("The later CAS is {0}".format(set_cas_after_reset_max_cas))
        self.assertTrue(
            set_cas_after_reset_max_cas < poisoned_cas,
            "For {0} CAS has not decreased. Current CAS {1} poisoned CAS {2}".format(
                "key-after-resetting cas", set_cas_after_reset_max_cas, poisoned_cas
            ),
        )

        # do a bunch of sets and verify the CAS is small - this is really only one set, need to do more

        gen_load = BlobGenerator(
            "key-for-cas-test-after-cas-is-reset", "value-for-cas-test-", self.value_size, end=1000
        )
        self._load_all_buckets(self.master, gen_load, "create", 0)

        gen_load.reset()
        while gen_load.has_next():
            key, value = gen_load.next()
            try:
                rc = mc_client.get(key)
                # rc = sdk_client.get(key)
                cas = rc[1]
            except Exception:
                # best effort - a failed get is logged but does not abort the loop
                self.log.info("get error with {0}".format(key))
                continue
            # assertion kept outside the try so a failure is not swallowed
            self.assertTrue(
                cas < poisoned_cas,
                "For key {0} CAS has not decreased. Current CAS {1} poisoned CAS {2}".format(
                    key, cas, poisoned_cas
                ),
            )

        rc = sdk_client.set("key3", "val1")
        better_cas = rc.cas

        self.log.info("The better CAS is {0}".format(better_cas))

        self.assertTrue(better_cas < poisoned_cas, "The CAS was not improved")

        # TODO: set the clock way ahead (remote_util_OS.py), run the cbepctl
        # fix command, then do mutations and verify they conform to the new
        # CAS - build on the CAS code above to iterate keys and check CAS.
コード例 #3
0
ファイル: lww_stats.py プロジェクト: membase/testrunner
    def test_drift_stats(self):
        """Verify the HLC drift statistics for set/delete-with-meta.

        An exercise in filling out the matrix with the right amount of code:
        we want to test (ahead, behind) x (setWithMeta, delWithMeta) x
        (active, replica). For now the set/del are done in sequence.
        """

        self.log.info("starting test_drift_stats")

        # when True exercise the "ahead" drift threshold, otherwise "behind"
        check_ahead_threshold = self.input.param("check_ahead_threshold", True)

        self.log.info("Checking the ahead threshold? {0}".format(check_ahead_threshold))

        sdk_client = SDKClient(scheme="couchbase", hosts=[self.servers[0].ip], bucket=self.buckets[0].name)
        mc_client = MemcachedClientHelper.direct_client(self.servers[0], self.buckets[0])

        # get the current time as an HLC CAS from a fresh mutation
        rc = sdk_client.set("key1", "val1")
        current_time_cas = rc.cas

        test_key = "test-set-with-metaxxxx"
        # vbucket id of the key: high 16 bits of the CRC32 masked into the
        # configured vbucket space. encode() is required because zlib.crc32
        # needs bytes on Python 3 (and is a no-op change on Python 2).
        vbId = (((zlib.crc32(test_key.encode())) >> 16) & 0x7FFF) & (self.vbuckets - 1)

        # verifying the case where we are within the threshold, do a set and del, neither should trigger
        rc = mc_client.setWithMeta(test_key, "test-value", 0, 0, 0, current_time_cas)
        # rc = mc_client.setWithMetaLWW(test_key, 'test-value', 0, 0, current_time_cas)
        rc = mc_client.delWithMetaLWW(test_key, 0, 0, current_time_cas + 1)

        vbucket_stats = mc_client.stats("vbucket-details")
        ahead_exceeded = int(vbucket_stats["vb_" + str(vbId) + ":drift_ahead_threshold_exceeded"])
        self.assertTrue(ahead_exceeded == 0, "Ahead exceeded expected is 0 but is {0}".format(ahead_exceeded))

        behind_exceeded = int(vbucket_stats["vb_" + str(vbId) + ":drift_behind_threshold_exceeded"])
        self.assertTrue(behind_exceeded == 0, "Behind exceeded expected is 0 but is {0}".format(behind_exceeded))

        # out of curiosity, log the total counts
        self.log.info(
            "Total stats: total abs drift {0} and total abs drift count {1}".format(
                vbucket_stats["vb_" + str(vbId) + ":total_abs_drift"],
                vbucket_stats["vb_" + str(vbId) + ":total_abs_drift_count"],
            )
        )

        # do the ahead set with meta case - verify: ahead threshold exceeded, total_abs_drift count and abs_drift
        if check_ahead_threshold:
            stat_descriptor = "ahead"
            cas = current_time_cas + 5000 * LWWStatsTests.DEFAULT_THRESHOLD

        else:
            stat_descriptor = "behind"
            cas = current_time_cas - 5000 * LWWStatsTests.DEFAULT_THRESHOLD

        rc = mc_client.setWithMeta(test_key, "test-value", 0, 0, 0, cas)
        rc = mc_client.delWithMetaLWW(test_key, 0, 0, cas + 1)

        # verify the vbucket stats
        vbucket_stats = mc_client.stats("vbucket-details")
        drift_counter_stat = "vb_" + str(vbId) + ":drift_" + stat_descriptor + "_threshold_exceeded"
        threshold_exceeded = int(mc_client.stats("vbucket-details")[drift_counter_stat])
        # MB-21450 self.assertTrue( ahead_exceeded == 2, '{0} exceeded expected is 1 but is {1}'.
        # format( stat_descriptor, threshold_exceeded))

        self.log.info(
            "Total stats: total abs drift {0} and total abs drift count {1}".format(
                vbucket_stats["vb_" + str(vbId) + ":total_abs_drift"],
                vbucket_stats["vb_" + str(vbId) + ":total_abs_drift_count"],
            )
        )

        # and verify the bucket stats: ep_active_hlc_drift_count, ep_clock_cas_drift_threshold_exceeded,
        # ep_active_hlc_drift
        bucket_stats = mc_client.stats()
        ep_active_hlc_drift_count = int(bucket_stats["ep_active_hlc_drift_count"])
        ep_clock_cas_drift_threshold_exceeded = int(bucket_stats["ep_clock_cas_drift_threshold_exceeded"])
        ep_active_hlc_drift = int(bucket_stats["ep_active_hlc_drift"])

        # Drift count appears to be the number of mutations
        self.assertTrue(ep_active_hlc_drift_count > 0, "ep_active_hlc_drift_count is 0, expected a positive value")

        # drift itself is the sum of the absolute values of all drifts, so check that it is greater than 0
        self.assertTrue(ep_active_hlc_drift > 0, "ep_active_hlc_drift is 0, expected a positive value")

        # the actual drift count is a little more granular: one out-of-threshold
        # set and one out-of-threshold delete above -> 2 exceed events expected
        expected_drift_threshold_exceed_count = 2
        self.assertTrue(
            expected_drift_threshold_exceed_count == ep_clock_cas_drift_threshold_exceeded,
            "ep_clock_cas_drift_threshold_exceeded is incorrect. Expected {0}, actual {1}".format(
                expected_drift_threshold_exceed_count, ep_clock_cas_drift_threshold_exceeded
            ),
        )
コード例 #4
0
    def test_poisoned_cas(self):
        """
        @note:  - set the clock ahead
                - do lots of sets and get some CASs
                - do a set and get the CAS (flag, CAS, value) and save it
                - set the clock back
                - verify the CAS is still big on new sets
                - reset the CAS
                - do the vbucket max cas and verify
                - do a new mutation and verify the CAS is smaller
        """
        # creating a user 'default' for the bucket
        self.log.info('starting test_poisoned_cas')
        payload = "name={0}&roles=admin&password=password".format(
            self.buckets[0].name)
        self.rest.add_set_builtin_user(self.buckets[0].name, payload)
        sdk_client = SDKClient(scheme='couchbase', hosts=[self.servers[0].ip], bucket=self.buckets[0].name)
        mc_client = MemcachedClientHelper.direct_client(self.servers[0], self.buckets[0])
        # move the system clock ahead to poison the CAS
        shell = RemoteMachineShellConnection(self.servers[0])
        self.assertTrue(shell.change_system_time(LWWStatsTests.ONE_HOUR_IN_SECONDS), 'Failed to advance the clock')

        output, error = shell.execute_command('date')
        self.log.info('Date after is set forward {0}'.format(output))
        sdk_client.set('key1', 'val1')
        rc = mc_client.get('key1')
        poisoned_cas = rc[1]  # mc_client.get returns (flags, cas, value)
        self.log.info('The poisoned CAS is {0}'.format(poisoned_cas))
        # do lots of mutations to set the max CAS for all vbuckets
        gen_load = BlobGenerator('key-for-cas-test', 'value-for-cas-test-', self.value_size, end=10000)
        self._load_all_buckets(self.master, gen_load, "create", 0)
        # move the clock back again and verify the CAS stays large
        self.assertTrue(shell.change_system_time(-LWWStatsTests.ONE_HOUR_IN_SECONDS), 'Failed to change the clock')
        output, error = shell.execute_command('date')
        self.log.info('Date after is set backwards {0}'.format(output))
        use_mc_bin_client = self.input.param("use_mc_bin_client", True)

        if use_mc_bin_client:
            rc = mc_client.set('key2', 0, 0, 'val2')
            second_poisoned_cas = rc[1]
        else:
            rc = sdk_client.set('key2', 'val2')
            second_poisoned_cas = rc.cas
        self.log.info('The second_poisoned CAS is {0}'.format(second_poisoned_cas))
        self.assertTrue(second_poisoned_cas > poisoned_cas,
                'Second poisoned CAS {0} is not larger than the first poisoned cas {1}'.format(second_poisoned_cas, poisoned_cas))
        # reset the CAS for all vbuckets. This needs to be done in conjunction with a clock change. If the clock is not
        # changed then the CAS will immediately continue with the clock. I see two scenarios:
        # 1. Set the clock back 1 hour and the CAS back 30 minutes, the CAS should be used
        # 2. Set the clock back 1 hour, set the CAS back 2 hours, the clock should be used
        # do case 1, set the CAS back 30 minutes.  Calculation below assumes the CAS is in nanoseconds
        earlier_max_cas = poisoned_cas - 30 * 60 * 1000000000
        for i in range(self.vbuckets):
            output, error = shell.execute_cbepctl(self.buckets[0], "", "set_vbucket_param",
                              "max_cas ", str(i) + ' ' + str(earlier_max_cas))
            if len(error) > 0:
                self.fail('Failed to set the max cas')
        # verify the max CAS took effect on every vbucket
        for i in range(self.vbuckets):
            max_cas = int(mc_client.stats('vbucket-details')['vb_' + str(i) + ':max_cas'])
            self.assertTrue(max_cas == earlier_max_cas,
                    'Max CAS not properly set for vbucket {0} set as {1} and observed {2}'.format(i, earlier_max_cas, max_cas))
            self.log.info('Per cbstats the max cas for bucket {0} is {1}'.format(i, max_cas))

        sdk_client.set('key-after-resetting cas', 'val1')
        rc2 = mc_client.get('key-after-resetting cas')
        set_cas_after_reset_max_cas = rc2[1]
        self.log.info('The later CAS is {0}'.format(set_cas_after_reset_max_cas))
        self.assertTrue(set_cas_after_reset_max_cas < poisoned_cas,
             'For {0} CAS has not decreased. Current CAS {1} poisoned CAS {2}'.format('key-after-resetting cas', set_cas_after_reset_max_cas, poisoned_cas))
        # do a bunch of sets and verify the CAS is small - this is really only one set, need to do more
        gen_load = BlobGenerator('key-for-cas-test-after-cas-is-reset', 'value-for-cas-test-', self.value_size, end=1000)
        self._load_all_buckets(self.master, gen_load, "create", 0)
        gen_load.reset()
        while gen_load.has_next():
            key, value = next(gen_load)
            try:
                rc = mc_client.get(key)
                #rc = sdk_client.get(key)
                cas = rc[1]
            except Exception:
                # best effort - a failed get is logged but does not abort the loop
                self.log.info('get error with {0}'.format(key))
                continue
            # assertion kept outside the try so a failure is not swallowed
            self.assertTrue(cas < poisoned_cas, 'For key {0} CAS has not decreased. Current CAS {1} poisoned CAS {2}'.format(key, cas, poisoned_cas))

        rc = sdk_client.set('key3', 'val1')
        better_cas = rc.cas
        self.log.info('The better CAS is {0}'.format(better_cas))
        self.assertTrue(better_cas < poisoned_cas, 'The CAS was not improved')
        # TODO: set the clock way ahead (remote_util_OS.py), run the cbepctl
        # fix command, then do mutations and verify they conform to the new
        # CAS - build on the CAS code above to iterate keys and check CAS.
コード例 #5
0
    def test_drift_stats(self):
        '''
        @note: An exercise in filling out the matrix with the right amount of code,
               we want to test (ahead,behind) and (setwithmeta, deleteWithmeta)
               and (active,replica).
               So for now let's do the set/del in sequences
        '''
        self.log.info('starting test_drift_stats')
        # Creating a user with the bucket name having admin access
        payload = "name={0}&roles=admin&password=password".format(
            self.buckets[0].name)
        self.rest.add_set_builtin_user(self.buckets[0].name, payload)
        # when True the test exercises the "ahead" drift threshold,
        # otherwise the "behind" one
        check_ahead_threshold = self.input.param("check_ahead_threshold",
                                                 True)

        self.log.info('Checking the ahead threshold? {0}'.format(
            check_ahead_threshold))

        sdk_client = SDKClient(scheme='couchbase',
                               hosts = [self.servers[0].ip],
                               bucket = self.buckets[0].name)
        mc_client = MemcachedClientHelper.direct_client(self.servers[0],
                                                        self.buckets[0])
        # NOTE(review): this shell connection is never used in this method
        shell = RemoteMachineShellConnection(self.servers[0])

        # get the current time, i.e. the HLC CAS of a fresh mutation
        rc = sdk_client.set('key1', 'val1')
        current_time_cas = rc.cas

        test_key = 'test-set-with-metaxxxx'
        # vbucket id for the key: high 16 bits of the CRC32, masked into
        # the configured vbucket space
        vbId = (((zlib.crc32(test_key.encode())) >> 16) & 0x7fff) & (self.vbuckets- 1)

        #import pdb;pdb.set_trace()
        # verifying the case where we are within the threshold, do a set and del, neither should trigger
        #mc_active.setWithMeta(key, '123456789', 0, 0, 123, cas)
        rc = mc_client.setWithMeta(test_key, 'test-value',
                                   0, 0, 1, current_time_cas)
        #rc = mc_client.setWithMetaLWW(test_key, 'test-value', 0, 0, current_time_cas)
        #rc = mc_client.delWithMetaLWW(test_key, 0, 0, current_time_cas+1)

        # an in-threshold CAS must not bump either drift counter
        vbucket_stats = mc_client.stats('vbucket-details')
        ahead_exceeded  = int(vbucket_stats['vb_' + str(vbId) + ':drift_ahead_threshold_exceeded'])
        self.assertTrue(ahead_exceeded == 0,
                        'Ahead exceeded expected is 0 but is {0}'.format( ahead_exceeded))
        behind_exceeded  = int( vbucket_stats['vb_' + str(vbId) + ':drift_behind_threshold_exceeded'] )
        self.assertTrue( behind_exceeded == 0, 'Behind exceeded expected is 0 but is {0}'.format( behind_exceeded))
        # out of curiosity, log the total counts
        self.log.info('Total stats: total abs drift {0} and total abs drift count {1}'.
                      format(vbucket_stats['vb_' + str(vbId) + ':total_abs_drift'],
                             vbucket_stats['vb_' + str(vbId) + ':total_abs_drift_count']))

        # do the ahead set with meta case - verify: ahead threshold exceeded, total_abs_drift count and abs_drift
        if check_ahead_threshold:
            stat_descriptor = 'ahead'
            cas = current_time_cas + 5000 * LWWStatsTests.DEFAULT_THRESHOLD

        else:
            stat_descriptor = 'behind'
            cas = current_time_cas -(5000 * LWWStatsTests.DEFAULT_THRESHOLD)
        # one deliberately out-of-threshold mutation
        rc = mc_client.setWithMeta(test_key, 'test-value', 0, 0, 0, cas)
        #rc = mc_client.delWithMetaLWW(test_key, 0, 0, cas+1)
        # verify the vbucket stats
        vbucket_stats = mc_client.stats('vbucket-details')
        drift_counter_stat = 'vb_' + str(vbId) + ':drift_' + stat_descriptor + '_threshold_exceeded'
        threshold_exceeded  = int( mc_client.stats('vbucket-details')[drift_counter_stat] )
        # MB-21450 self.assertTrue( ahead_exceeded == 2, '{0} exceeded expected is 1 but is {1}'.
        # format( stat_descriptor, threshold_exceeded))

        self.log.info('Total stats: total abs drift {0} and total abs drift count {1}'.
                      format(vbucket_stats['vb_' + str(vbId) + ':total_abs_drift'],
                             vbucket_stats['vb_' + str(vbId) + ':total_abs_drift_count']))

        # and verify the bucket stats: ep_active_hlc_drift_count, ep_clock_cas_drift_threshold_exceeded,
        # ep_active_hlc_drift
        bucket_stats = mc_client.stats()
        ep_active_hlc_drift_count = int(bucket_stats['ep_active_hlc_drift_count'])
        ep_clock_cas_drift_threshold_exceeded = int(bucket_stats['ep_clock_cas_drift_threshold_exceeded'])
        ep_active_hlc_drift = int(bucket_stats['ep_active_hlc_drift'])

        # Drift count appears to be the number of mutations
        self.assertTrue( ep_active_hlc_drift_count > 0, 'ep_active_hlc_drift_count is 0, expected a positive value')

        # drift itself is the sum of the absolute values of all drifts, so check that it is greater than 0
        self.assertTrue( ep_active_hlc_drift > 0, 'ep_active_hlc_drift is 0, expected a positive value')

        # the actual drift count is a little more granular
        # presumably 1 because only one out-of-threshold mutation is done above
        # (the delWithMetaLWW is commented out) - TODO confirm
        expected_drift_threshold_exceed_count = 1
        self.assertTrue( expected_drift_threshold_exceed_count == ep_clock_cas_drift_threshold_exceeded,
                         'ep_clock_cas_drift_threshold_exceeded is incorrect. Expected {0}, actual {1}'.
                             format(expected_drift_threshold_exceed_count,
                                    ep_clock_cas_drift_threshold_exceeded) )
コード例 #6
0
class SubdocSimpleDataset(SubdocBaseTest):
    def setUp(self):
        """Run the base setup, then open a direct memcached client to
        the first bucket on the master node for sub-document tests."""
        super(SubdocSimpleDataset, self).setUp()
        self.client = self.direct_client(self.master, self.buckets[0])

    def tearDown(self):
        """Delegate all cleanup to the base test class."""
        super(SubdocSimpleDataset, self).tearDown()

    def test_system_xattr_with_compression(self):
        """Regression test for MB-34346: large system xattrs with compression.

        Inserts a doc (randomly large or empty), attaches large system
        xattrs and a nested regular attribute, then shrinks the doc with a
        short TTL and verifies it is gone after expiry.
        """
        # subdoc.subdoc_simple_dataset.SubdocSimpleDataset.test_system_xattr_with_compression,compression_mode=active,use_sdk_client=True,value_size=262114
        KEY = "key"
        self.value_size = self.input.param("value_size", 102400)
        self.log.info("Insert a key and set xattr for the key")

        val = self.generate_random_json_doc(self.value_size)
        # start from either a large doc or an empty one
        if random.choice([True, False]):
            self.client.insert(KEY, val, 60)
        else:
            self.client.insert(KEY, {}, 60)
        rv = self.client.cb.mutate_in(
            KEY, SD.upsert('_system1', val, xattr=True, create_parents=True))
        self.assertTrue(rv.success)
        rv = self.client.cb.mutate_in(
            KEY,
            SD.upsert('_system2', {
                'field1': val,
                'field2': val
            },
                      xattr=True,
                      create_parents=True))
        self.assertTrue(rv.success)
        rv = self.client.cb.mutate_in(
            KEY,
            SD.upsert('a', {
                'field1': {
                    'sub_field1a': 0,
                    'sub_field1b': 0  # was `00`: a SyntaxError on Python 3
                },
                'field2': {
                    'sub_field2a': 20,
                    'sub_field2b': 200
                }
            },
                      xattr=True,
                      create_parents=True))
        self.assertTrue(rv.success)

        # shrink the doc, give it a 20s TTL and wait past expiry
        self.client.upsert(KEY, value={}, ttl=20)
        self.sleep(30)
        try:
            self.client.get(KEY)
        except MemcachedError as exp:
            # presumably status 1 is "key not found" - TODO confirm constant
            self.assertEqual(exp.status, 1)
        else:
            self.fail("expected get of expired key {0} to fail".format(KEY))

    def generate_random_json_doc(self, value_size=10240):
        """Return a JSON string of roughly *value_size* bytes.

        Despite the name, the document is deterministic: a fixed "age"
        list and a "name" entry of `value_size` 'a' characters.
        """
        # list() is required: a bare range() is not JSON serializable on Python 3
        age = list(range(1, 100))
        name = [
            'a' * value_size,
        ]
        template = {"age": age, "name": name}
        json_string = json.dumps(template)
        return json_string

    # Test the fix for MB-30278
    def test_verify_backtick(self):
        """Verify a literal backtick in a field name can be addressed by
        doubling it (``) in the sub-document path (MB-30278)."""
        result = True
        failures = {}  # path -> error; renamed from `dict` to avoid shadowing the builtin
        self.key = "verify_backtick"
        array = {
            "name`": "Douglas Reynholm",
            "place": "India",
        }
        jsonDump = json.dumps(array)
        self.client.set(self.key, 0, 0, jsonDump)

        # Insert double backtick(``) to refer a literal backtick(`) in key
        for count in range(5):
            key1 = 'name``'
            try:
                opaque, cas, data = self.client.get_sd(self.key, key1)
                data = json.loads(data)
                if data != array["name`"]:
                    self.fail("Data does not match")
            except Exception as e:
                # self.log is a logger object, not callable: use .info()
                # (was self.log(...), which raised TypeError)
                self.log.info(
                    "Unable to get key {} for path {} after {} tries".format(
                        self.key, key1, count))
                failures[key1] = str(e)
                result = False

        self.assertTrue(result, failures)

    # Test the fix for MB-31070
    def test_expiry_after_append(self):
        """Create a doc with an expiry, append to it, and verify the
        append does not change the expiry (MB-31070)."""
        self.key = "expiry_after_append"
        array = {
            "name": "Douglas Reynholm",
            "place": "India",
        }
        jsonDump = json.dumps(array)
        # 60s TTL so the doc outlives the 5s sleep below
        self.client.set(self.key, 60, 0, jsonDump)
        client1 = VBucketAwareMemcached(RestConnection(self.master), 'default')
        get_meta_resp_before = client1.generic_request(
            client1.memcached(self.key).getMeta, self.key)
        self.log.info("Sleeping for 5 sec")
        time.sleep(5)
        client1.generic_request(
            client1.memcached(self.key).append, self.key, 'appended data')
        get_meta_resp_after = client1.generic_request(
            client1.memcached(self.key).getMeta, self.key)
        # assertEquals is a deprecated alias removed in Python 3.12
        self.assertEqual(get_meta_resp_before[2],
                         get_meta_resp_after[2])  # 3rd value is expiry value

#SD_COUNTER

    def test_counter(self):
        """Exercise the subdoc counter op on top-level and array-element
        integer paths and verify the document matches ``expected_array``."""
        result = True
        failures = {}
        self.key = "test_counter"
        array = {
            "add_integer": 0,
            "sub_integer": 1,
            "add_double": 0.0,
            "sub_double": 0.0,
            "array_add_integer": [0, 1],
            "array_sub_integer": [0, 1],
        }
        expected_array = {
            # BUG FIX: add_double was expected to be 1.0 although its
            # counter op is commented out below; the value is unchanged.
            "add_integer": 1,
            "sub_integer": 0,
            "add_double": 0.0,
            "sub_double": 0.0,
            "array_add_integer": [1, 1],
            "array_sub_integer": [0, 0],
        }
        self.client.set(self.key, 0, 0, json.dumps(array))
        self.counter(self.client,
                     key=self.key,
                     path='add_integer',
                     value="1")
        self.counter(self.client,
                     key=self.key,
                     path='sub_integer',
                     value="-1")
        # Subdoc counters only accept integer deltas, so the *_double
        # paths are not exercised.
        #self.counter(self.client, key = "test_counter", path = 'add_double', value = "1.0")
        #self.counter(self.client, key = "test_counter", path = 'sub_double', value = "-1.0")
        # BUG FIX: the original incremented array_add_integer[0] and [1]
        # by the non-integer delta "-1.0" and never touched
        # array_sub_integer, which cannot yield the expected [1, 1] and
        # [0, 0] results above.
        self.counter(self.client,
                     key=self.key,
                     path='array_add_integer[0]',
                     value="1")
        self.counter(self.client,
                     key=self.key,
                     path='array_sub_integer[1]',
                     value="-1")
        self.json = expected_array
        for key in expected_array.keys():
            logic, data_return = self.get_string_and_verify_return(
                self.client, key=self.key, path=key)
            if not logic:
                failures[key] = {
                    "expected": expected_array[key],
                    "actual": data_return
                }
            result = result and logic
        self.assertTrue(result, failures)

# SD_GET

    def test_get_numbers(self):
        """Subdoc-GET every field of a simple numeric dataset."""
        dataset = self.generate_simple_data_numbers()
        self.json = dataset
        self.get_verify(dataset, "simple_dataset_numbers")

    def test_get_null(self):
        """Subdoc-GET every field of a dataset containing nulls."""
        dataset = self.generate_simple_data_null()
        self.json = dataset
        self.get_verify(dataset, "simple_dataset_null")

    def test_get_boolean(self):
        """Subdoc-GET every field of a dataset containing booleans."""
        dataset = self.generate_simple_data_boolean()
        self.json = dataset
        self.get_verify(dataset, "simple_dataset_boolean")

    def test_get_array_numbers(self):
        """Subdoc-GET every field of a dataset of numeric arrays."""
        dataset = self.generate_simple_data_array_of_numbers()
        self.json = dataset
        self.get_verify(dataset, "simple_dataset_array_numbers")

    def test_get_array_strings(self):
        """Subdoc-GET every field of a dataset of string arrays.

        BUG FIX: previously used generate_simple_data_strings() (plain
        strings); use the array-of-strings generator to match the test's
        name and the other *_array_strings tests in this file.
        """
        self.json = self.generate_simple_data_array_strings()
        self.get_verify(self.json, "generate_simple_data_array_strings")

    def test_get_mix_arrays(self):
        """Subdoc-GET every field of a dataset of mixed-type arrays."""
        dataset = self.generate_simple_data_mix_arrays()
        self.json = dataset
        self.get_verify(dataset, "generate_simple_data_mix_arrays")

    def test_get_numbers_boundary(self):
        """Subdoc-GET on numeric data.

        NOTE(review): despite the name, this uses the array-of-numbers
        generator; confirm whether a dedicated boundary-values generator
        was intended (cf. test_add_numbers_boundary below).
        """
        dataset = self.generate_simple_data_array_of_numbers()
        self.json = dataset
        self.get_verify(dataset, "generate_simple_data_numbers_boundary")

    def test_get_element_arrays(self):
        """Subdoc-GET individual elements of 1-D and 2-D arrays and verify
        each returned element against the source data."""
        self.key = "element_arrays"
        self.json = self.generate_simple_arrays()
        self.client.set(self.key, 0, 0, json.dumps(self.json))
        self.get_and_verify_with_value(
            self.client, self.key, "single_dimension_array[0]",
            str(self.json["single_dimension_array"][0]))
        self.get_and_verify_with_value(
            self.client, self.key, "two_dimension_array[0][0]",
            str(self.json["two_dimension_array"][0][0]))
        # BUG FIX: removed the trailing `self.assertTrue(result, dict)` —
        # neither name was defined in this scope, so it raised NameError;
        # get_and_verify_with_value already asserts on mismatch.

# SD_ARRAY_ADD

    def test_add_last_array(self):
        """Append "1" to arrays at several nesting depths and verify the
        resulting document matches ``expected_array``."""
        result = True
        failures = {}
        self.key = "test_add_last_array"
        array = {
            "empty": [],
            "single_dimension_array": ["0"],
            "two_dimension_array": [["0"]],
            "three_dimension_array": [[["0"]]]
        }
        expected_array = {
            "empty": ["1"],
            "single_dimension_array": ["0", "1"],
            "two_dimension_array": [["0", "1"]],
            "three_dimension_array": [[["0", "1"]]]
        }
        self.client.set(self.key, 0, 0, json.dumps(array))
        # Push "1" onto the end of each target array, in order.
        for path in ('empty',
                     'single_dimension_array',
                     'two_dimension_array[0]',
                     'three_dimension_array[0][0]'):
            self.array_add_last(self.client,
                                key=self.key,
                                path=path,
                                value=json.dumps("1"))
        self.json = expected_array
        for field in expected_array.keys():
            logic, data_return = self.get_string_and_verify_return(
                self.client, key=self.key, path=field)
            if not logic:
                failures[field] = {
                    "expected": expected_array[field],
                    "actual": data_return
                }
            result = result and logic
        self.assertTrue(result, failures)

    def test_add_first_array(self):
        """Prepend "0" to arrays at several nesting depths and verify the
        resulting document matches ``expected_array``."""
        result = True
        failures = {}
        self.key = "test_add_first_array"
        array = {
            "empty": [],
            "single_dimension_array": ["1"],
            "two_dimension_array": [["1"]],
            "three_dimension_array": [[["1"]]]
        }
        expected_array = {
            "empty": ["0"],
            "single_dimension_array": ["0", "1"],
            "two_dimension_array": [["0", "1"]],
            "three_dimension_array": [[["0", "1"]]]
        }
        self.client.set(self.key, 0, 0, json.dumps(array))
        # Push "0" onto the front of each target array, in order.
        for path in ('empty',
                     'single_dimension_array',
                     'two_dimension_array[0]',
                     'three_dimension_array[0][0]'):
            self.array_add_first(self.client,
                                 key=self.key,
                                 path=path,
                                 value=json.dumps("0"))
        self.json = expected_array
        for field in expected_array.keys():
            logic, data_return = self.get_string_and_verify_return(
                self.client, key=self.key, path=field)
            if not logic:
                failures[field] = {
                    "expected": expected_array[field],
                    "actual": data_return
                }
            result = result and logic
        self.assertTrue(result, failures)

    def test_add_unique_array(self):
        """Add-unique "1" to arrays at several nesting depths and verify
        the resulting document matches ``expected_array``."""
        result = True
        failures = {}
        self.key = "test_add_unique_array"
        array = {
            "empty": [],
            "single_dimension_array": ["0", 2],
            "two_dimension_array": [["0", 2]],
            "three_dimension_array": [[["0", 2]]]
        }
        expected_array = {
            "empty": ["1"],
            "single_dimension_array": ["0", 2, "1"],
            "two_dimension_array": [["0", 2, "1"]],
            "three_dimension_array": [[["0", 2, "1"]]]
        }
        self.client.set(self.key, 0, 0, json.dumps(array))
        # Add-unique appends "1" since it is absent from every array.
        for path in ('empty',
                     'single_dimension_array',
                     'two_dimension_array[0]',
                     'three_dimension_array[0][0]'):
            self.array_add_unique(self.client,
                                  key=self.key,
                                  path=path,
                                  value=json.dumps("1"))
        self.json = expected_array
        for field in expected_array.keys():
            logic, data_return = self.get_string_and_verify_return(
                self.client, key=self.key, path=field)
            if not logic:
                failures[field] = {
                    "expected": expected_array[field],
                    "actual": data_return
                }
            result = result and logic
        self.assertTrue(result, failures)

    def test_add_insert_array(self):
        """Insert elements at explicit indices into empty nested arrays and
        verify the final layout matches ``expected_array``."""
        result = True
        failures = {}
        self.key = "test_add_insert_array"
        array = {
            "single_dimension_array_no_element": [],
            "two_dimension_array_no_element": [[]],
            "three_dimension_array_no_element": [[[]]]
        }
        expected_array = {
            "single_dimension_array_no_element": [0, 1, 2, 3],
            "two_dimension_array_no_element": [[0, 1, 2, 3], [0, 1, 2, 3]],
            "three_dimension_array_no_element": [[[0, 1, 2, 3], [0, 1, 2, 3]],
                                                 [0, 1, 2, 3]],
        }
        self.client.set(self.key, 0, 0, json.dumps(array))
        # (path, value) pairs applied in order; inserting at an occupied
        # index shifts the existing element(s) to the right.
        insertions = [
            ("single_dimension_array_no_element[0]", 1),
            ("single_dimension_array_no_element[0]", 0),
            ("single_dimension_array_no_element[2]", 3),
            ("single_dimension_array_no_element[2]", 2),
            ("two_dimension_array_no_element[0][0]", 1),
            ("two_dimension_array_no_element[0][0]", 0),
            ("two_dimension_array_no_element[0][2]", 3),
            ("two_dimension_array_no_element[0][2]", 2),
            ("two_dimension_array_no_element[1]", [0, 1, 2, 3]),
            ("three_dimension_array_no_element[0][0][0]", 1),
            ("three_dimension_array_no_element[0][0][0]", 0),
            ("three_dimension_array_no_element[0][0][2]", 3),
            ("three_dimension_array_no_element[0][0][2]", 2),
            ("three_dimension_array_no_element[0][1]", [0, 1, 2, 3]),
            ("three_dimension_array_no_element[1]", [0, 1, 2, 3]),
        ]
        for path, value in insertions:
            self.array_add_insert(self.client,
                                  key=self.key,
                                  path=path,
                                  value=json.dumps(value))
        self.json = expected_array
        for field in expected_array.keys():
            logic, data_return = self.get_string_and_verify_return(
                self.client, key=self.key, path=field)
            if not logic:
                failures[field] = {
                    "expected": expected_array[field],
                    "actual": data_return
                }
            result = result and logic
        self.assertTrue(result, failures)

# SD_ADD

    def test_add_numbers(self):
        """Subdoc-ADD a numeric-arrays dataset field by field and verify."""
        data = self.generate_simple_data_array_of_numbers()
        self.json = data
        self.dict_add_verify(data, "test_add_numbers")

    def test_add_array_numbers(self):
        """Subdoc-ADD arrays of numbers field by field and verify."""
        data = self.generate_simple_data_array_of_numbers()
        self.json = data
        self.dict_add_verify(data, "test_add_array_of_numbers")

    def test_add_numbers_boundary(self):
        """Subdoc-ADD boundary numeric values field by field and verify."""
        data = self.generate_simple_data_numbers()
        self.json = data
        self.dict_add_verify(data, "test_add_numbers_boundary")

    def test_add_strings(self):
        """Subdoc-ADD string values field by field and verify."""
        data = self.generate_simple_data_strings()
        self.json = data
        self.dict_add_verify(data, "test_add_string")

    def test_add_array_strings(self):
        """Subdoc-ADD arrays of strings field by field and verify."""
        data = self.generate_simple_data_array_strings()
        self.json = data
        self.dict_add_verify(data, "test_add_array_strings")

    def test_add_null(self):
        """Subdoc-ADD null values field by field and verify."""
        data = self.generate_simple_data_null()
        self.json = data
        self.dict_add_verify(data, "test_add_null")

    def test_add_boolean(self):
        """Subdoc-ADD boolean values field by field and verify."""
        data = self.generate_simple_data_boolean()
        self.json = data
        self.dict_add_verify(data, "test_add_boolean")

    def test_add_array_mix(self):
        """Subdoc-ADD mixed-type arrays field by field and verify."""
        data = self.generate_simple_data_mix_arrays()
        self.json = data
        self.dict_add_verify(data, "test_add_array_mix")

# SD_UPSERT - Add Operations

    def test_upsert_numbers(self):
        """Subdoc-UPSERT (add path) a numeric-arrays dataset and verify."""
        data = self.generate_simple_data_array_of_numbers()
        self.json = data
        self.dict_upsert_verify(data, "test_upsert_numbers")

    def test_upsert_array_numbers(self):
        """Subdoc-UPSERT (add path) arrays of numbers and verify."""
        data = self.generate_simple_data_array_of_numbers()
        self.json = data
        self.dict_upsert_verify(data, "test_upsert_array_of_numbers")

    def test_upsert_numbers_boundary(self):
        """Subdoc-UPSERT (add path) boundary numeric values and verify."""
        data = self.generate_simple_data_numbers()
        self.json = data
        self.dict_upsert_verify(data, "test_upsert_numbers_boundary")

    def test_upsert_strings(self):
        """Subdoc-UPSERT (add path) string values and verify."""
        data = self.generate_simple_data_strings()
        self.json = data
        self.dict_upsert_verify(data, "test_upsert_string")

    def test_upsert_array_strings(self):
        """Subdoc-UPSERT (add path) arrays of strings and verify."""
        data = self.generate_simple_data_array_strings()
        self.json = data
        self.dict_upsert_verify(data, "test_upsert_array_strings")

    def test_upsert_null(self):
        """Subdoc-UPSERT (add path) null values and verify."""
        data = self.generate_simple_data_null()
        self.json = data
        self.dict_upsert_verify(data, "test_upsert_null")

    def test_upsert_boolean(self):
        """Subdoc-UPSERT (add path) boolean values and verify."""
        data = self.generate_simple_data_boolean()
        self.json = data
        self.dict_upsert_verify(data, "test_upsert_boolean")

    def test_upsert_array_mix(self):
        """Subdoc-UPSERT (add path) mixed-type arrays and verify."""
        data = self.generate_simple_data_mix_arrays()
        self.json = data
        self.dict_upsert_verify(data, "test_upsert_array_mix")

# SD_UPERT - Replace Operations

    def test_upsert_replace_numbers(self):
        """Subdoc-UPSERT (replace path) a numeric-arrays dataset and verify."""
        data = self.generate_simple_data_array_of_numbers()
        self.json = data
        self.dict_upsert_replace_verify(data, "test_upsert_replace_numbers")

    def test_upsert_replace_numbers_expiry(self):
        """Subdoc-UPSERT replace with create=True and a 30s expiry; the
        document must expire afterwards (MB-32364).

        Run as: subdoc.subdoc_simple_dataset.SubdocSimpleDataset.test_upsert_replace_numbers_expiry
        """
        data = self.generate_simple_data_array_of_numbers()
        self.json = data
        self.dict_upsert_replace_verify(data,
                                        "test_upsert_replace_numbers",
                                        create=True,
                                        expiry=30)

    def test_upsert_replace_array_numbers(self):
        """Subdoc-UPSERT (replace path) arrays of numbers and verify."""
        data = self.generate_simple_data_array_of_numbers()
        self.json = data
        self.dict_upsert_replace_verify(
            data, "test_upsert_replace_array_of_numbers")

    def test_upsert_replace_numbers_boundary(self):
        """Subdoc-UPSERT (replace path) boundary numeric values and verify."""
        data = self.generate_simple_data_numbers()
        self.json = data
        self.dict_upsert_replace_verify(
            data, "test_upsert_replace_numbers_boundary")

    def test_upsert_replace_strings(self):
        """Subdoc-UPSERT (replace path) string values and verify."""
        data = self.generate_simple_data_strings()
        self.json = data
        self.dict_upsert_replace_verify(data, "test_upsert_replace_string")

    def test_upsert_replace_array_strings(self):
        """Subdoc-UPSERT (replace path) arrays of strings and verify."""
        data = self.generate_simple_data_array_strings()
        self.json = data
        self.dict_upsert_replace_verify(data,
                                        "test_upsert_replace_array_strings")

    def test_upsert_replace_null(self):
        """Subdoc-UPSERT (replace path) null values and verify."""
        data = self.generate_simple_data_null()
        self.json = data
        self.dict_upsert_replace_verify(data, "test_upsert_replace_null")

    def test_upsert_replace_boolean(self):
        """Subdoc-UPSERT (replace path) boolean values and verify."""
        data = self.generate_simple_data_boolean()
        self.json = data
        self.dict_upsert_replace_verify(data, "test_upsert_replace_boolean")

    def test_upsert_replace_array_mix(self):
        """Subdoc-UPSERT (replace path) mixed-type arrays and verify."""
        data = self.generate_simple_data_mix_arrays()
        self.json = data
        self.dict_upsert_replace_verify(data, "test_upsert_replace_array_mix")

    def test_xattr_compression(self):
        # MB-32669
        # Run as:
        # subdoc.subdoc_simple_dataset.SubdocSimpleDataset.test_xattr_compression,compression=active
        """Store a TTL'd doc plus an xattr while bucket compression is
        'active', evict it, wait past the TTL, and verify the document is
        fully expired (no live or temp items remain)."""
        # Direct memcached connection for stats/observe/evict operations.
        mc = MemcachedClient(self.master.ip, 11210)
        mc.sasl_auth_plain(self.master.rest_username,
                           self.master.rest_password)
        mc.bucket_select('default')

        self.key = "test_xattr_compression"
        self.nesting_level = 5
        array = {'i_add': 0, 'i_sub': 1, 'a_i_a': [0, 1], 'ai_sub': [0, 1]}
        base_json = self.generate_json_for_nesting()
        nested_json = self.generate_nested(base_json, array,
                                           self.nesting_level)
        jsonDump = json.dumps(nested_json)
        stats = mc.stats()
        # Precondition: the bucket must be running with active compression.
        self.assertEquals(stats['ep_compression_mode'], 'active')

        scheme = "http"
        host = "{0}:{1}".format(self.master.ip, self.master.port)
        self.sdk_client = SDKClient(scheme=scheme,
                                    hosts=[host],
                                    bucket="default")

        # Store the doc with a 60s TTL, then attach an xattr via subdoc
        # (same TTL so the mutation does not extend the doc's life).
        self.sdk_client.set(self.key, value=jsonDump, ttl=60)
        rv = self.sdk_client.cb.mutate_in(self.key,
                                          SD.upsert('my.attr',
                                                    "value",
                                                    xattr=True,
                                                    create_parents=True),
                                          ttl=60)
        self.assertTrue(rv.success)

        # wait for it to persist and then evict the key
        persisted = 0
        while persisted == 0:
            opaque, rep_time, persist_time, persisted, cas = mc.observe(
                self.key)

        mc.evict_key(self.key)
        # Sleep past the 60s TTL so the doc expires while evicted.
        time.sleep(65)
        try:
            self.client.get(self.key)
            self.fail("the key should get expired")
        except mc_bin_client.MemcachedError as error:
            # Expect status 1 (item not found) once the doc has expired.
            self.assertEquals(error.status, 1)

        stats = mc.stats()
        # Neither a live item nor a temp item may remain after expiry.
        self.assertEquals(int(stats['curr_items']), 0)
        self.assertEquals(int(stats['curr_temp_items']), 0)

# SD_REPLACE - Replace Operations

    def test_replace_numbers(self):
        """Subdoc-REPLACE fields of a numeric-arrays dataset and verify."""
        data = self.generate_simple_data_array_of_numbers()
        self.json = data
        self.dict_replace_verify(data, "test_replace_numbers")

    def test_replace_array_numbers(self):
        """Subdoc-REPLACE arrays of numbers and verify."""
        data = self.generate_simple_data_array_of_numbers()
        self.json = data
        self.dict_replace_verify(data, "test_replace_array_of_numbers")

    def test_replace_numbers_boundary(self):
        """Subdoc-REPLACE boundary numeric values and verify."""
        data = self.generate_simple_data_numbers()
        self.json = data
        self.dict_replace_verify(data, "test_replace_numbers_boundary")

    def test_replace_strings(self):
        """Subdoc-REPLACE string values and verify."""
        data = self.generate_simple_data_strings()
        self.json = data
        self.dict_replace_verify(data, "test_replace_string")

    def test_replace_array_strings(self):
        """Subdoc-REPLACE arrays of strings and verify."""
        data = self.generate_simple_data_array_strings()
        self.json = data
        self.dict_replace_verify(data, "test_replace_array_strings")

    def test_replace_null(self):
        """Subdoc-REPLACE null values and verify."""
        data = self.generate_simple_data_null()
        self.json = data
        self.dict_replace_verify(data, "test_replace_null")

    def test_replace_boolean(self):
        """Subdoc-REPLACE boolean values and verify."""
        data = self.generate_simple_data_boolean()
        self.json = data
        self.dict_replace_verify(data, "test_replace_boolean")

    def test_replace_array_mix(self):
        """Subdoc-REPLACE mixed-type arrays and verify."""
        data = self.generate_simple_data_mix_arrays()
        self.json = data
        self.dict_replace_verify(data, "test_replace_array_mix")

# SD_DELETE

    def test_delete_dict(self):
        """Subdoc-DELETE every field of a dataset one at a time and verify
        the remaining fields after each delete.

        BUG FIX: the document key was "test_delete_array", colliding with
        test_delete_array's document; use a key matching this test's name,
        consistent with every other test in this file.
        """
        self.json = self.generate_simple_data_array_of_numbers()
        self.dict_delete_verify(self.json, "test_delete_dict")

    def test_delete_array(self):
        """Subdoc-DELETE single elements from 1-D, 2-D and 3-D arrays and
        verify the remaining contents match ``expected_array``."""
        result = True
        # BUG FIX: the failure container was never initialized, so the
        # original `dict[key] = ...` assigned into the builtin `dict`
        # type and raised TypeError on any mismatch.
        failures = {}
        self.key = "test_delete_array"
        array = {
            "numbers": [1, 2, 3],
            "strings": ["absde", "dddl", "dkdkd"],
            "two_dimension_array": [["0", "1"]],
            "three_dimension_array": [[["0", "1"]]]
        }
        expected_array = {
            "numbers": [2, 3],
            "strings": ["absde", "dddl"],
            "two_dimension_array": [["1"]],
            "three_dimension_array": [[["1"]]]
        }
        self.client.set(self.key, 0, 0, json.dumps(array))
        self.delete(self.client, key=self.key, path='numbers[0]')
        self.delete(self.client, key=self.key, path='strings[2]')
        self.delete(self.client,
                    key=self.key,
                    path='two_dimension_array[0][0]')
        self.delete(self.client,
                    key=self.key,
                    path='three_dimension_array[0][0][0]')
        self.json = expected_array
        for key in expected_array.keys():
            logic, data_return = self.get_string_and_verify_return(
                self.client, key=self.key, path=key)
            if not logic:
                failures[key] = {
                    "expected": expected_array[key],
                    "actual": data_return
                }
            result = result and logic
        self.assertTrue(result, failures)

# Helper Methods

    def get_verify(self, dataset, data_key="default"):
        """Store *dataset* as a JSON doc under *data_key* and subdoc-GET
        every top-level field, asserting each value matches the source."""
        mismatches = {}
        result = True
        self.key = data_key
        self.json = dataset
        self.client.set(self.key, 0, 0, json.dumps(self.json))
        for field in self.json.keys():
            logic, data_return = self.get_string_and_verify_return(
                self.client, key=self.key, path=field)
            if not logic:
                mismatches[field] = {"expected": self.json[field],
                                     "actual": data_return}
            result = result and logic
        self.assertTrue(result, mismatches)

    def dict_add_verify(self, dataset, data_key="default"):
        """Seed an empty JSON doc, subdoc-ADD every field of *dataset*,
        then GET each field back and verify it round-trips."""
        mismatches = {}
        result = True
        self.key = data_key
        self.json = dataset
        # The document starts as an empty object; fields are added below.
        self.client.set(self.key, 0, 0, json.dumps({}))
        for field in self.json.keys():
            self.dict_add(self.client, self.key, field,
                          json.dumps(self.json[field]))
        for field in self.json.keys():
            logic, data_return = self.get_string_and_verify_return(
                self.client, key=self.key, path=field)
            if not logic:
                mismatches[field] = {"expected": self.json[field],
                                     "actual": data_return}
            result = result and logic
        self.assertTrue(result, mismatches)

    def dict_upsert_verify(self, dataset, data_key="default"):
        """Seed an empty JSON doc, subdoc-UPSERT every field of *dataset*
        (exercising the add path), then GET and verify each field."""
        mismatches = {}
        result = True
        self.key = data_key
        self.json = dataset
        # The document starts as an empty object; upserts create fields.
        self.client.set(self.key, 0, 0, json.dumps({}))
        for field in self.json.keys():
            self.dict_upsert(self.client, self.key, field,
                             json.dumps(self.json[field]))
        for field in self.json.keys():
            logic, data_return = self.get_string_and_verify_return(
                self.client, key=self.key, path=field)
            if not logic:
                mismatches[field] = {"expected": self.json[field],
                                     "actual": data_return}
            result = result and logic
        self.assertTrue(result, mismatches)

    def dict_upsert_replace_verify(self,
                                   dataset,
                                   data_key="default",
                                   create=False,
                                   expiry=0):
        """Store *dataset*, subdoc-UPSERT shuffled values over every field
        (exercising the replace path), then verify the document matches
        the shuffled data.

        If *expiry* is non-zero, additionally wait past the TTL and assert
        the document has expired.
        """
        result_dict = {}
        result = True
        self.key = data_key
        self.json = dataset
        jsonDump = json.dumps(self.json)
        new_json = self.shuffle_json(self.json)
        self.client.set(self.key, 0, 0, jsonDump)
        for key in self.json.keys():
            self.dict_upsert(self.client,
                             self.key,
                             key,
                             json.dumps(new_json[key]),
                             create=create,
                             expiry=expiry)
        self.json = new_json
        for key in new_json.keys():
            logic, data_return = self.get_string_and_verify_return(
                self.client, key=self.key, path=key)
            if not logic:
                # BUG FIX: was self.new_json[key]; the shuffled data is the
                # local new_json, so reporting a mismatch raised
                # AttributeError instead of showing the diff.
                result_dict[key] = {
                    "expected": new_json[key],
                    "actual": data_return
                }
            result = result and logic
        self.assertTrue(result, result_dict)

        if expiry != 0:
            time.sleep(expiry + 5)
            try:
                self.client.get(self.key)
                self.fail("Document is not expired")
            except mc_bin_client.MemcachedError as error:
                # Status 1 (item not found) indicates the doc expired.
                self.assertEquals(error.status, 1)

    def dict_replace_verify(self, dataset, data_key="default"):
        """Store *dataset*, subdoc-REPLACE every field with shuffled
        values, then verify the document matches the shuffled data."""
        result_dict = {}
        result = True
        self.key = data_key
        self.json = dataset
        jsonDump = json.dumps(self.json)
        new_json = self.shuffle_json(self.json)
        self.client.set(self.key, 0, 0, jsonDump)
        for key in self.json.keys():
            self.dict_replace(self.client, self.key, key,
                              json.dumps(new_json[key]))
        self.json = new_json
        for key in new_json.keys():
            logic, data_return = self.get_string_and_verify_return(
                self.client, key=self.key, path=key)
            if not logic:
                # BUG FIX: was self.new_json[key]; the shuffled data is the
                # local new_json, so reporting a mismatch raised
                # AttributeError instead of showing the diff.
                result_dict[key] = {
                    "expected": new_json[key],
                    "actual": data_return
                }
            result = result and logic
        self.assertTrue(result, result_dict)

    def dict_delete_verify(self, dataset, data_key="default"):
        """Store *dataset*, subdoc-DELETE fields one at a time; after each
        delete, verify every remaining field is still intact."""
        result_dict = {}
        result = True
        self.key = data_key
        self.json = dataset
        self.client.set(self.key, 0, 0, json.dumps(self.json))
        # BUG FIX: iterate over a snapshot of the keys — the original
        # popped from self.json while iterating its keys() (a RuntimeError
        # on Python 3) and reused the same loop variable `key` for the
        # inner verification loop, clobbering the outer one.
        for deleted_key in list(self.json.keys()):
            self.delete(self.client, self.key, deleted_key)
            self.json.pop(deleted_key)
            for key in self.json.keys():
                logic, data_return = self.get_string_and_verify_return(
                    self.client, key=self.key, path=key)
                if not logic:
                    # BUG FIX: was self.new_json[key] (AttributeError);
                    # the expected data is the remaining self.json.
                    result_dict[key] = {
                        "expected": self.json[key],
                        "actual": data_return
                    }
                result = result and logic
            self.assertTrue(result, result_dict)

    def delete(self, client, key='', path=''):
        """Subdoc-delete *path* from document *key*; fail the test on any
        error (note: uses self.client, like the sibling helpers)."""
        try:
            self.client.delete_sd(key, path)
        except Exception as e:
            self.log.info(e)
            # BUG FIX: the failure message said "add" for a delete op.
            self.fail(
                "Unable to delete key {0} for path {1} after {2} tries".format(
                    key, path, 1))

    def dict_add(self, client, key='', path='', value=None):
        """Subdoc dictionary-add *value* at *path* in document *key*;
        fail the test on any error."""
        try:
            self.client.dict_add_sd(key, path, value)
        except Exception as e:
            self.log.info(e)
            msg = "Unable to add key {0} for path {1} after {2} tries"
            self.fail(msg.format(key, path, 1))

    def dict_replace(self, client, key='', path='', value=None):
        """Subdoc replace of *value* at *path* in document *key*;
        fail the test on any error."""
        try:
            self.client.replace_sd(key, path, value)
        except Exception as e:
            self.log.info(e)
            msg = "Unable to replace key {0} for path {1} after {2} tries"
            self.fail(msg.format(key, path, 1))

    def dict_upsert(self,
                    client,
                    key='',
                    path='',
                    value=None,
                    create=False,
                    expiry=0):
        """Subdoc dictionary-upsert *value* at *path*, optionally creating
        intermediate paths (*create*) and setting a document *expiry*;
        fail the test on any error."""
        try:
            self.client.dict_upsert_sd(
                key, path, value, create=create, expiry=expiry)
        except Exception as e:
            self.log.info(e)
            msg = "Unable to add key {0} for path {1} after {2} tries"
            self.fail(msg.format(key, path, 1))

    def counter(self, client, key='', path='', value=None):
        """Subdoc counter op: apply integer delta *value* at *path*;
        fail the test on any error."""
        try:
            self.client.counter_sd(key, path, value)
        except Exception as e:
            self.log.info(e)
            msg = "Unable to add key {0} for path {1} after {2} tries"
            self.fail(msg.format(key, path, 1))

    def array_add_last(self, client, key='', path='', value=None):
        """Append ``value`` to the end of the array at ``path`` in ``key``.

        ``client`` is unused; the operation goes through ``self.client``.
        Fails the running test if the push raises.
        """
        try:
            self.client.array_push_last_sd(key, path, value)
        except Exception as err:
            self.log.info(err)
            self.fail("Unable to add key {0} for path {1} after {2} tries"
                      .format(key, path, 1))

    def array_add_first(self, client, key='', path='', value=None):
        """Prepend ``value`` to the array at ``path`` in document ``key``.

        ``client`` is unused; the operation goes through ``self.client``.
        Fails the running test if the push raises.
        """
        try:
            self.client.array_push_first_sd(key, path, value)
        except Exception as err:
            self.log.info(err)
            message = "Unable to add key {0} for path {1} after {2} tries"
            self.fail(message.format(key, path, 1))

    def array_add_unique(self, client, key='', path='', value=None):
        """Add ``value`` to the array at ``path`` in ``key`` only if not present.

        ``client`` is unused; the operation goes through ``self.client``.
        Fails the running test if the add raises.
        """
        try:
            self.client.array_add_unique_sd(key, path, value)
        except Exception as err:
            self.log.info(err)
            self.fail("Unable to add key {0} for path {1} after {2} tries"
                      .format(key, path, 1))

    def array_add_insert(self, client, key='', path='', value=None):
        """Insert ``value`` into the array at ``path`` in document ``key``.

        ``client`` is unused; the operation goes through ``self.client``.
        Fails the running test if the insert raises.
        """
        try:
            self.client.array_add_insert_sd(key, path, value)
        except Exception as err:
            self.log.info(err)
            message = "Unable to add key {0} for path {1} after {2} tries"
            self.fail(message.format(key, path, 1))

    def get_and_verify(self, client, key='', path=''):
        """Fetch ``path`` from ``key`` and assert it matches ``self.json[path]``.

        Args:
            client: unused; ``self.client`` is used, as in the sibling helpers.
            key: document id to read.
            path: sub-document path whose value is compared (via ``str``)
                against the expected value stored in ``self.json``.
        """
        try:
            opaque, cas, data = self.client.get_sd(key, path)
        except Exception as e:
            # Log the underlying error before failing, consistent with every
            # other sub-doc helper in this class (the original dropped ``e``).
            self.log.info(e)
            self.fail(
                "Unable to get key {0} for path {1} after {2} tries".format(
                    key, path, 1))
        self.assertTrue(str(data) == str(self.json[path]),
                        msg="data not returned correctly")

    def get_and_verify_with_value(self, client, key='', path='', value=''):
        """Fetch ``path`` from ``key`` and assert its JSON-decoded value equals ``value``.

        ``client`` is unused; ``self.client`` performs the read.
        The comparison is string-based: ``str(decoded) == value``.
        """
        try:
            _opaque, _cas, raw = self.client.get_sd(key, path)
            data = json.loads(raw)
        except Exception:
            self.fail(
                "Unable to get key {0} for path {1} after {2} tries".format(
                    key, path, 1))
        self.assertTrue(str(data) == value, msg="data not returned correctly")

    def get_string_and_verify_return(self, client, key='', path=''):
        """Fetch and JSON-decode the value at ``path`` of document ``key``.

        Returns:
            tuple: ``(matches, decoded)`` where ``matches`` is True when the
            decoded value equals the expected ``self.json[path]``.
        """
        try:
            _opaque, _cas, raw = self.client.get_sd(key, path)
            decoded = json.loads(raw)
        except Exception:
            self.fail(
                "Unable to get key {0} for path {1} after {2} tries".format(
                    key, path, 1))
        return decoded == self.json[path], decoded

    def get_and_verify_return(self, client, key='', path=''):
        """Fetch ``path`` from ``key`` and report equality with the expected value.

        Returns:
            tuple: ``(matches, data)`` where ``matches`` is the string-compare
            result against ``self.json[path]`` and ``data`` is the raw value
            returned by the sub-doc get.
        """
        try:
            _opaque, _cas, data = self.client.get_sd(key, path)
        except Exception:
            self.fail(
                "Unable to get key {0} for path {1} after {2} tries".format(
                    key, path, 1))
        return str(data) == str(self.json[path]), data

    def shuffle_json(self, json_value):
        """Return a dict with the same keys as ``json_value``, each key mapped
        to the value of a randomly chosen key from the input.

        Fixes over the original:
        - ``json_value.keys()[index]`` raises TypeError on Python 3 because
          ``dict_keys`` is not subscriptable; the keys are now materialized
          with ``list`` (works on both Python 2 and 3).
        - the local name ``dict`` shadowed the builtin; replaced by a dict
          comprehension with ``random.choice`` (equivalent to indexing with
          ``random.randint(0, len(keys) - 1)``).
        """
        keys = list(json_value.keys())
        return {key: json_value[random.choice(keys)] for key in keys}
# ---- Code example #7 (0 votes) ---- file: lww_stats.py, project: arod1987/testrunner ----
    def test_drift_stats(self):
        '''
        @note: An exercise in filling out the matrix with the right amount of code,
               we want to test (ahead,behind) and (setwithmeta, deleteWithmeta)
               and (active,replica).
               So for now let's do the set/del in sequences
        '''
        self.log.info('starting test_drift_stats')
        #Creating a user with the bucket name having admin access
        payload = "name={0}&roles=admin&password=password".format(
            self.buckets[0].name)
        self.rest.add_set_builtin_user(self.buckets[0].name, payload)
        # Test parameter selects which drift direction to exercise:
        # True -> "ahead" (future CAS), False -> "behind" (past CAS).
        check_ahead_threshold = self.input.param("check_ahead_threshold",
                                                 True)

        self.log.info('Checking the ahead threshold? {0}'.format(
            check_ahead_threshold))

        sdk_client = SDKClient(scheme='couchbase',
                               hosts = [self.servers[0].ip],
                               bucket = self.buckets[0].name)
        mc_client = MemcachedClientHelper.direct_client(self.servers[0],
                                                        self.buckets[0])
        shell = RemoteMachineShellConnection(self.servers[0])

        # get the current time
        # Use a plain SDK set to obtain a CAS that reflects the server's
        # current HLC time; later setWithMeta calls are measured against it.
        rc = sdk_client.set('key1', 'val1')
        current_time_cas = rc.cas

        test_key = 'test-set-with-metaxxxx'
        # Map the key to its vbucket id the same way the server does
        # (CRC32 of the key, top bits, modulo the vbucket count).
        vbId = (((zlib.crc32(test_key)) >> 16) & 0x7fff) & (self.vbuckets- 1)

        #import pdb;pdb.set_trace()
        # verifying the case where we are within the threshold, do a set and del, neither should trigger
        #mc_active.setWithMeta(key, '123456789', 0, 0, 123, cas)
        rc = mc_client.setWithMeta(test_key, 'test-value',
                                   0, 0, 1, current_time_cas)
        #rc = mc_client.setWithMetaLWW(test_key, 'test-value', 0, 0, current_time_cas)
        #rc = mc_client.delWithMetaLWW(test_key, 0, 0, current_time_cas+1)

        # Baseline: a mutation with the current CAS must not bump either
        # drift-threshold-exceeded counter for this vbucket.
        vbucket_stats = mc_client.stats('vbucket-details')
        ahead_exceeded  = int(vbucket_stats['vb_' + str(vbId) + ':drift_ahead_threshold_exceeded'])
        self.assertTrue(ahead_exceeded == 0,
                        'Ahead exceeded expected is 0 but is {0}'.format( ahead_exceeded))
        behind_exceeded  = int( vbucket_stats['vb_' + str(vbId) + ':drift_behind_threshold_exceeded'] )
        self.assertTrue( behind_exceeded == 0, 'Behind exceeded expected is 0 but is {0}'.format( behind_exceeded))
        # out of curiousity, log the total counts
        self.log.info('Total stats: total abs drift {0} and total abs drift count {1}'.
                      format(vbucket_stats['vb_' + str(vbId) + ':total_abs_drift'],
                             vbucket_stats['vb_' + str(vbId) + ':total_abs_drift_count']))

        # do the ahead set with meta case - verify: ahead threshold exceeded, total_abs_drift count and abs_drift
        if check_ahead_threshold:
            stat_descriptor = 'ahead'
            # Push the CAS well past the drift threshold in the future.
            # NOTE(review): the 5000 multiplier presumably converts the
            # threshold units into CAS (HLC) units — confirm against
            # LWWStatsTests.DEFAULT_THRESHOLD's definition.
            cas = current_time_cas + 5000 * LWWStatsTests.DEFAULT_THRESHOLD

        else:
            stat_descriptor = 'behind'
            cas = current_time_cas -(5000 * LWWStatsTests.DEFAULT_THRESHOLD)
        rc = mc_client.setWithMeta(test_key, 'test-value', 0, 0, 0, cas)
        #rc = mc_client.delWithMetaLWW(test_key, 0, 0, cas+1)
        # verify the vbucket stats
        vbucket_stats = mc_client.stats('vbucket-details')
        drift_counter_stat = 'vb_' + str(vbId) + ':drift_' + stat_descriptor + '_threshold_exceeded'
        threshold_exceeded  = int( mc_client.stats('vbucket-details')[drift_counter_stat] )
        # MB-21450 self.assertTrue( ahead_exceeded == 2, '{0} exceeded expected is 1 but is {1}'.
        # format( stat_descriptor, threshold_exceeded))

        self.log.info('Total stats: total abs drift {0} and total abs drift count {1}'.
                      format(vbucket_stats['vb_' + str(vbId) + ':total_abs_drift'],
                             vbucket_stats['vb_' + str(vbId) + ':total_abs_drift_count']))

        # and verify the bucket stats: ep_active_hlc_drift_count, ep_clock_cas_drift_threshold_exceeded,
        # ep_active_hlc_drift
        bucket_stats = mc_client.stats()
        ep_active_hlc_drift_count = int(bucket_stats['ep_active_hlc_drift_count'])
        ep_clock_cas_drift_threshold_exceeded = int(bucket_stats['ep_clock_cas_drift_threshold_exceeded'])
        ep_active_hlc_drift = int(bucket_stats['ep_active_hlc_drift'])

        # Drift count appears to be the number of mutations
        self.assertTrue( ep_active_hlc_drift_count > 0, 'ep_active_hlc_drift_count is 0, expected a positive value')

        # drift itself is the sum of the absolute values of all drifts, so check that it is greater than 0
        self.assertTrue( ep_active_hlc_drift > 0, 'ep_active_hlc_drift is 0, expected a positive value')

        # the actual drift count is a little more granular
        # Exactly one mutation (the out-of-threshold setWithMeta above) is
        # expected to have crossed the drift threshold.
        expected_drift_threshold_exceed_count = 1
        self.assertTrue( expected_drift_threshold_exceed_count == ep_clock_cas_drift_threshold_exceeded,
                         'ep_clock_cas_drift_threshold_exceeded is incorrect. Expected {0}, actual {1}'.
                             format(expected_drift_threshold_exceed_count,
                                    ep_clock_cas_drift_threshold_exceeded) )