Code Example #1
    def test_cbas_ingestion_with_documents_containing_multilingual_data(self):
        """
        1. Create reference to SDK client
        2. Add multilingual json documents to default bucket
        3. Verify ingestion on dataset with and without a secondary index
        """
        multilingual_strings = [
            'De flesta sagorna här är från Hans Hörner svenska översättning',
            'Il était une fois une maman cochon qui avait trois petits cochons',
            '森林里住着一只小兔子,它叫“丑丑”。它的眼睛红红的,像一对红宝石',
            '外治オヒル回条フ聞定ッ加官言岸ムモヱツ求碁込ヌトホヒ舞高メ旅位',
            'ان عدة الشهور عند الله اثنا عشر شهرا في',
        ]

        self.log.info("Fetch test case arguments")
        self.fetch_test_case_arguments()

        self.log.info("Create reference to SDK client")
        client = SDKClient(scheme="couchbase", hosts=[self.master.ip], bucket=self.cb_bucket_name,
                           password=self.master.rest_password)

        self.log.info("Add multilingual documents to the default bucket")
        client.insert_custom_json_documents("custom-key-", multilingual_strings)

        self.log.info("Create connections, datasets and indexes")
        self.cbas_dataset_setup()

        self.log.info("Wait for ingestion to complete and verify count")
        self.cbas_util.wait_for_ingestion_complete([self.dataset_name], len(multilingual_strings))
        self.assertTrue(self.cbas_util.validate_cbas_dataset_items_count(self.dataset_name, len(multilingual_strings)))
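
A follow-up spot check can confirm the ingested values themselves rather than just the count. A minimal sketch, assuming the execute_statement_on_cbas_util helper used in later examples (it returns status, metrics, errors, results and a response handle) and assuming it reports status as "success":

        # Hypothetical extra verification: read the ingested values back from
        # the dataset; results would carry the multilingual documents.
        status, metrics, errors, results, _ = \
            self.cbas_util.execute_statement_on_cbas_util(
                'SELECT VALUE d FROM %s d;' % self.dataset_name)
        self.assertTrue(status == "success")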
Code Example #2
    def test_ingestion_impact_for_documents_containing_xattr_meta_information(self):

        self.log.info("Fetch test case arguments")
        self.fetch_test_case_arguments()

        self.log.info("Create reference to SDK client")
        client = SDKClient(scheme="couchbase", hosts=[self.master.ip], bucket="default",
                           password=self.master.rest_password)

        self.log.info("Insert custom data into default bucket")
        documents = ['{"name":"value"}'] * self.num_of_documents
        document_id_prefix = "id-"
        client.insert_custom_json_documents(document_id_prefix, documents)

        self.log.info("Create connections, datasets, indexes")
        self.cbas_dataset_setup()

        self.log.info("Wait for ingestion to complete and verify count")
        self.cbas_util.wait_for_ingestion_complete([self.dataset_name], self.num_of_documents)
        self.assertTrue(self.cbas_util.validate_cbas_dataset_items_count(self.dataset_name, self.num_of_documents))

        self.log.info("Insert xattr attribute for all the documents and assert document count on dataset")
        for i in range(self.num_of_documents):
            client.insert_xattr_attribute(document_id=document_id_prefix + str(i), path="a", value="{'xattr-value': 1}",
                                          xattr=True, create_parents=True)
        self.assertTrue(self.cbas_util.validate_cbas_dataset_items_count(self.dataset_name, self.num_of_documents))

        self.log.info("Update xattr attribute and assert document count on dataset")
        for i in range(self.num_of_documents):
            client.update_xattr_attribute(document_id=document_id_prefix + str(i), path="a",
                                          value="{'xattr-value': 11}", xattr=True, create_parents=True)
        self.assertTrue(self.cbas_util.validate_cbas_dataset_items_count(self.dataset_name, self.num_of_documents))
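
Between the count assertions one could also verify that an xattr write actually landed. A minimal sketch, assuming the wrapper exposes the underlying bucket as .cb (as __load_chain and test_add_concurrent do) and that couchbase.subdocument is imported as SD (as in test_MB_32114):

        # Hypothetical spot check: read the xattr back on one document.
        rv = client.cb.lookup_in(document_id_prefix + "0",
                                 SD.get("a", xattr=True))
        self.assertTrue(rv[0] is not None)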
Code Example #3
    def test_swap_rebalance_cb_cbas_together(self):

        self.log.info("Creates cbas buckets and dataset")
        wait_for_rebalance = self.input.param("wait_for_rebalance", True)
        dataset_count_query = "select count(*) from {0};".format(
            self.cbas_dataset_name)
        self.setup_for_test()

        self.log.info("Add KV node and don't rebalance")
        self.add_node(node=self.rebalanceServers[1], rebalance=False)

        self.log.info("Add cbas node and don't rebalance")
        self.add_node(node=self.rebalanceServers[3], rebalance=False)

        otpnodes = []
        nodes = self.rest.node_statuses()
        for node in nodes:
            if node.ip in (self.rebalanceServers[0].ip,
                           self.rebalanceServers[2].ip):
                otpnodes.append(node)

        self.log.info("Remove master node")
        self.remove_node(otpnode=otpnodes,
                         wait_for_rebalance=wait_for_rebalance)
        self.master = self.rebalanceServers[1]

        self.log.info("Create instances pointing to new master nodes")
        c_utils = cbas_utils(self.rebalanceServers[1],
                             self.rebalanceServers[3])
        c_utils.createConn(self.cb_bucket_name)

        self.log.info("Create reference to SDK client")
        client = SDKClient(scheme="couchbase",
                           hosts=[self.rebalanceServers[1].ip],
                           bucket=self.cb_bucket_name,
                           password=self.rebalanceServers[1].rest_password)

        self.log.info("Add more document to default bucket")
        documents = ['{"name":"value"}'] * (self.num_items // 10)
        document_id_prefix = "custom-id-"
        client.insert_custom_json_documents(document_id_prefix, documents)

        self.log.info(
            "Run queries as rebalance is in progress : Rebalance state:%s" %
            self.rest._rebalance_progress_status())
        handles = c_utils._run_concurrent_queries(
            dataset_count_query,
            "immediate",
            2000,
            batch_size=self.concurrent_batch_size)

        self.log.info("Log concurrent query status")
        self.cbas_util.log_concurrent_query_outcome(self.master, handles)

        if not c_utils.validate_cbas_dataset_items_count(
                self.cbas_dataset_name,
                self.num_items + (self.num_items // 10), 0):
            self.fail(
                "No. of items in CBAS dataset do not match that in the CB bucket")
Code Example #4
File: cbas_infer_schema.py  Project: umang-cb/Jython
    def verify_infer_schema_on_documents_with_overlapping_types_same_keys(self):
        self.log.info('Create unique documents')
        client = SDKClient(hosts=[self.master.ip], bucket=self.cb_bucket_name, password=self.master.rest_password)
        documents = [
            '{ "array": 20, "integer": 300.34345, "float":false, "boolean":["a", "b", "c"], "null":null, "string":"300"}',
            '{ "array": 300, "integer": ["a", "b", "c"], "float":1012.56756, "boolean":300, "null":null, "string":10345665}',
            '{ "array": [1, 2, 3], "integer": 10345665, "float":"Steve", "boolean":[[1, 2], [3, 4], []], "null":null, "string":20.343}',
            '{ "array": 20.343, "integer": [[1, 2], [3, 4], []], "float":0.00011, "boolean":[1, 1.1, 1.0, 0], "null":null, "string":[1, 1.1, 1.0, 0]}',
            '{ "array": 10345665, "integer": 1012.56756, "float":1012.56756, "boolean":false, "null":null, "string":[1, "hello", ["a", 1], 2.22]}',
            '{ "array": "Steve", "integer": 1, "float":[1, 2, 3], "boolean":false, "null":null, "string":1}',
            '{ "array": true, "integer": 1.1, "float":20.343, "boolean":["a", "b", "c"], "null":null, "string":300}',
            '{ "array": true, "integer": "Alex", "float":10345665, "boolean":1.1, "null":null, "string":0.00011}',
            '{ "array": 20.343, "integer": 20.343, "float":300, "boolean":true, "null":null, "string":true}',
            '{ "array": 1, "integer": 10345665, "float":300.34345, "boolean":1, "null":null, "string":true}'
        ]
        for index, document in enumerate(documents):
            client.insert_document(str(index), document)

        self.log.info('Create primary index on %s' % self.cb_bucket_name)
        self.rest.query_tool('CREATE PRIMARY INDEX idx on %s' % self.cb_bucket_name)

        self.log.info('Create dataset')
        self.cbas_util.createConn(self.cb_bucket_name)
        self.cbas_util.create_dataset_on_bucket(self.cb_bucket_name, self.cbas_dataset_name)

        self.log.info('Connect link')
        self.cbas_util.connect_link()

        self.log.info('Verify dataset count')
        self.cbas_util.validate_cbas_dataset_items_count(self.cbas_dataset_name, len(documents))

        self.validate_infer_schema_response(len(documents))
Code Example #5
    def __load_chain(self, start_num=0):
        for i, cluster in enumerate(self.get_cb_clusters()):
            if self._rdirection == REPLICATION_DIRECTION.BIDIRECTION:
                if i > len(self.get_cb_clusters()) - 1:
                    break
            else:
                if i >= len(self.get_cb_clusters()) - 1:
                    break
            if not self._dgm_run:
                for bucket in cluster.get_buckets():
                    client = SDKClient(scheme="couchbase", hosts=[cluster.get_master_node().ip],
                                            bucket=bucket.name).cb
                    for i in range(start_num, start_num + self._num_items):
                        key = 'k_%s_%s' % (i, str(cluster).replace(' ', '_').
                                           replace('.', '_').replace(',', '_').replace(':', '_'))
                        value = {'xattr_%s' % i:'value%s' % i}
                        client.upsert(key, value)
                        client.mutate_in(key, SD.upsert('xattr_%s' % i, 'value%s' % i,
                                                             xattr=True,
                                                             create_parents=True))
                        partition = bucket.kvs[1].acquire_partition(key)#["partition"]
                        if self.only_store_hash:
                            value = str(crc32.crc32_hash(value))
                        res = client.get(key)
                        partition.set(key, json.dumps(value), 0, res.flags)
                        bucket.kvs[1].release_partition(key)

            else:
                cluster.load_all_buckets_till_dgm(
                    active_resident_threshold=self._active_resident_threshold,
                    items=self._num_items)
Code Example #6
    def test_logical_clock_ticks(self):

        self.log.info('starting test_logical_clock_ticks')

        payload = "name={0}&roles=admin&password=password".format(
            self.buckets[0].name)
        self.rest.add_set_builtin_user(self.buckets[0].name, payload)
        sdk_client = SDKClient(scheme='couchbase',
                               hosts=[self.servers[0].ip],
                               bucket=self.buckets[0].name)
        mc_client = MemcachedClientHelper.direct_client(
            self.servers[0], self.buckets[0])
        shell = RemoteMachineShellConnection(self.servers[0])

        # do a bunch of mutations to set the max cas
        gen_load = BlobGenerator('key-for-cas-test-logical-ticks',
                                 'value-for-cas-test-',
                                 self.value_size,
                                 end=10000)
        self._load_all_buckets(self.master, gen_load, "create", 0)

        vbucket_stats = mc_client.stats('vbucket-details')
        base_total_logical_clock_ticks = 0
        for i in range(self.vbuckets):
            base_total_logical_clock_ticks += int(
                vbucket_stats['vb_' + str(i) + ':logical_clock_ticks'])
        self.log.info('The base total logical clock ticks is {0}'.format(
            base_total_logical_clock_ticks))

        # move the system clock back so the logical counter part of HLC is used
        # and the logical clock ticks stat is incremented
        self.assertTrue(
            shell.change_system_time(-LWWStatsTests.ONE_HOUR_IN_SECONDS),
            'Failed to change the system time')

        # do more mutations
        NUMBER_OF_MUTATIONS = 10000
        gen_load = BlobGenerator('key-for-cas-test-logical-ticks',
                                 'value-for-cas-test-',
                                 self.value_size,
                                 end=NUMBER_OF_MUTATIONS)
        self._load_all_buckets(self.master, gen_load, "create", 0)

        time.sleep(30)
        vbucket_stats = mc_client.stats('vbucket-details')
        total_logical_clock_ticks = 0
        for i in range(self.vbuckets):
            total_logical_clock_ticks += int(
                vbucket_stats['vb_' + str(i) + ':logical_clock_ticks'])

        self.log.info('The total logical clock ticks is {0}'.format(
            total_logical_clock_ticks))

        self.assertTrue(
            total_logical_clock_ticks -
            base_total_logical_clock_ticks == NUMBER_OF_MUTATIONS,
            'Expected clock tick {0} actual {1}'.format(
                NUMBER_OF_MUTATIONS,
                total_logical_clock_ticks - base_total_logical_clock_ticks))
Code Example #7
    def test_add_concurrent(self):
        DOCID = 'subdoc_doc_id'
        SERVER_IP = self.servers[0].ip
        ITERATIONS = 200
        THREADS = 20

        main_bucket = SDKClient(scheme="couchbase", hosts=[self.servers[0].ip],
                  bucket='default').cb
        main_bucket.upsert(DOCID, {'recs':[]})

        class Runner(Thread):
            def run(self, *args, **kw):
                cb = SDKClient(scheme="couchbase", hosts=[SERVER_IP],
                  bucket='default').cb
                for x in range(ITERATIONS):
                    cb.mutate_in(DOCID, SD.array_append('recs', 1))

        thrs = [Runner() for x in range(THREADS)]
        [t.start() for t in thrs]
        [t.join() for t in thrs]

        obj = main_bucket.get(DOCID)

        array_entry_count = len(obj.value['recs'])

        self.assertTrue(array_entry_count == ITERATIONS * THREADS,
                         'Incorrect number of array entries. Expected {0} actual {1}'.format(ITERATIONS * THREADS,
                                                                           array_entry_count))
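
The assertion holds because each subdocument array_append is a single atomic mutation on the server. For contrast, a minimal sketch (hypothetical, using the same wrapped bucket object) of the full-document read-modify-write this design avoids; with 20 threads racing, unguarded writes lose updates:

    def naive_append(cb, docid):
        # Racy read-modify-write: run from many threads, the final array
        # would hold fewer than ITERATIONS * THREADS entries.
        rv = cb.get(docid)                  # read the current document
        recs = rv.value['recs']
        recs.append(1)                      # modify the local copy
        cb.upsert(docid, {'recs': recs})    # unguarded write: last writer wins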
Code Example #8
File: cbas_infer_schema.py  Project: umang-cb/Jython
    def verify_infer_schema_on_same_type_missing_fields(self):
        self.log.info('Create unique documents')
        client = SDKClient(hosts=[self.master.ip], bucket=self.cb_bucket_name, password=self.master.rest_password)
        documents = [
            '{ "array": [1,2,3,4,5,6], "integer": 10, "float":10.12, "boolean":true, "null":null, "object":{"array": [1,2,3,4,5,6], "integer": 10, "float":10.12, "boolean":true, "null":null}}',
            '{ "array": null, "integer": 10, "float":10.12, "boolean":true, "null":null, "object":{"array": [1,2,3,4,5,6], "integer": 10, "float":10.12, "boolean":true, "null":"null value"}}',
            '{ "array": [1,2,3,4,5,6], "integer": 10, "float":10.12, "boolean":true, "null":null, "object":{"array": null, "integer": null, "float":null, "boolean":null, "null":null}}'
            ]
        for index, document in enumerate(documents):
            client.insert_document(str(index), document)

        self.log.info('Create primary index on %s' % self.cb_bucket_name)
        self.rest.query_tool('CREATE PRIMARY INDEX idx on %s' % self.cb_bucket_name)

        self.log.info('Create dataset')
        self.cbas_util.createConn(self.cb_bucket_name)
        self.cbas_util.create_dataset_on_bucket(self.cb_bucket_name, self.cbas_dataset_name)

        self.log.info('Connect link')
        self.cbas_util.connect_link()

        self.log.info('Verify dataset count')
        self.cbas_util.validate_cbas_dataset_items_count(self.cbas_dataset_name, len(documents))

        self.validate_infer_schema_response(len(documents))
Code Example #9
File: cbas_infer_schema.py  Project: umang-cb/Jython
    def verify_infer_schema_on_unique_nested_documents(self):
        self.log.info('Create unique documents')
        client = SDKClient(hosts=[self.master.ip], bucket=self.cb_bucket_name, password=self.master.rest_password)
        documents = [
            '{"from": "user_512", "to": "user_768", "text": "Hey, that Beer you recommended is pretty fab, thx!", "sent_timestamp":476560}',
            '{"user_id": 512, "name": "Bob Likington", "email": "*****@*****.**", "sign_up_timestamp": 1224612317, "last_login_timestamp": 1245613101}',
            '{"user_id": 768, "name": "Simon Neal", "email": "*****@*****.**", "sign_up_timestamp": 1225554317, "last_login_timestamp": 1234166701, "country": "Scotland", "pro_account": true, "friends": [512, 666, 742, 1111]}',
            '{"photo_id": "ccbcdeadbeefacee", "size": { "w": 500, "h": 320, "unit": "px" }, "exposure": "1/1082", "aperture": "f/2.4", "flash": false, "camera": { "name": "iPhone 4S", "manufacturer": {"Company":"Apple", "Location": {"City":"California", "Country":"USA"} } }, "user_id": 512, "timestamp": [2011, 12, 13, 16, 31, 7]}'
            ]
        for index, document in enumerate(documents):
            client.insert_document(str(index), document)

        self.log.info('Create primary index on %s' % self.cb_bucket_name)
        self.rest.query_tool('CREATE PRIMARY INDEX idx on %s' % self.cb_bucket_name)

        self.log.info('Create dataset')
        self.cbas_util.createConn(self.cb_bucket_name)
        self.cbas_util.create_dataset_on_bucket(self.cb_bucket_name, self.cbas_dataset_name)

        self.log.info('Connect link')
        self.cbas_util.connect_link()

        self.log.info('Verify dataset count')
        self.cbas_util.validate_cbas_dataset_items_count(self.cbas_dataset_name, len(documents))

        self.validate_infer_schema_response(len(documents), compare_with_n1ql=False)
Code Example #10
    def _direct_client(self, server, bucket, timeout=30):
        # CREATE SDK CLIENT
        if self.use_sdk_client:
            try:
                from sdk_client import SDKClient
                scheme = "couchbase"
                host = self.master.ip
                if self.master.ip == "127.0.0.1":
                    scheme = "http"
                    host = "{0}:{1}".format(self.master.ip, self.master.port)
                return SDKClient(scheme=scheme, hosts=[host], bucket=bucket)
            except ImportError:
                from sdk_client3 import SDKClient
                return SDKClient(RestConnection(self.master), bucket=bucket)
            except Exception as ex:
                self.log.error("cannot load sdk client due to error {0}".format(str(ex)))
        # USE MC BIN CLIENT WHEN NOT USING SDK CLIENT
        return self.direct_mc_bin_client(server, bucket, timeout=timeout)
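
This SDK2-then-SDK3 import fallback recurs throughout these examples (see test_MB_32114 and test_MB_36087). Reduced to its core, the idiom is just the try/except below; note that the two wrappers are constructed differently (scheme/hosts versus a RestConnection), so callers still have to branch after the import:

    # Prefer the Python SDK 2.x wrapper, fall back to the SDK 3.x wrapper
    # when the older module is absent.
    try:
        from sdk_client import SDKClient     # SDK 2.x wrapper
    except ImportError:
        from sdk_client3 import SDKClient    # SDK 3.x wrapper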
Code Example #11
    def test_MB_32114(self):
        try:
            from sdk_client import SDKClient
        except ImportError:
            from sdk_client3 import SDKClient
        import couchbase.subdocument as SD

        rest = RestConnection(self.master)
        client = VBucketAwareMemcached(rest, 'default')
        if self.maxttl:
            self._expiry_pager(self.master)
        sdk_client = SDKClient(scheme='couchbase',
                               hosts=[self.master.ip],
                               bucket='default')
        KEY_NAME = 'key1'

        for i in range(1000):
            mcd = client.memcached(KEY_NAME + str(i))
            rc = mcd.set(KEY_NAME + str(i), 0, 0,
                         json.dumps({'value': 'value2'}))
            sdk_client.mutate_in(
                KEY_NAME + str(i),
                SD.upsert("subdoc_key",
                          "subdoc_val",
                          xattr=True,
                          create_parents=True))
            # wait for it to persist
            persisted = 0
            while persisted == 0:
                opaque, rep_time, persist_time, persisted, cas = client.observe(
                    KEY_NAME + str(i))

        start_time = time.time()
        self._load_doc_data_all_buckets(batch_size=1000)
        end_time = time.time()

        for i in range(1000):
            try:
                mcd = client.memcached(KEY_NAME + str(i))
                _, flags, exp, seqno, cas = mcd.getMeta(KEY_NAME + str(i))
                rc = mcd.del_with_meta(KEY_NAME + str(i), 0, 0, 2, cas + 1)
            except MemcachedError as error:
                self.fail("Exception with del_with_meta - {0}".format(error))
        self.cluster.compact_bucket(self.master, "default")
        if self.maxttl:
            time_to_sleep = (self.maxttl - (end_time - start_time)) + 20
            self.sleep(int(time_to_sleep))
        else:
            self.sleep(60)
        active_bucket_items = rest.get_active_key_count("default")
        replica_bucket_items = rest.get_replica_key_count("default")
        print('active_bucket_items ', active_bucket_items)
        print('replica_bucket_items ', replica_bucket_items)
        if active_bucket_items * self.num_replicas != replica_bucket_items:
            self.fail("Mismatch in data !!!")
Code Example #12
    def test_sdk_client(self):
        """
            Test SDK Client Calls
        """
        scheme = "couchbase"
        host = self.master.ip
        if self.master.ip == "127.0.0.1":
            scheme = "http"
            host = "{0}:{1}".format(self.master.ip, self.master.port)
        client = SDKClient(scheme=scheme, hosts=[host], bucket="default")
        client.remove("1", quiet=True)
        client.insert("1", "{1:2}")
        flag, cas, val = client.get("1")
        self.log.info("val={0},flag={1},cas={2}".format(val, flag, cas))
        client.upsert("1", "{1:3}")
        client.touch("1", ttl=100)
        flag, cas, val = client.get("1")
        self.assertTrue(val == "{1:3}", val)
        self.log.info("val={0},flag={1},cas={2}".format(val, flag, cas))
        client.remove("1", cas=cas)
        client.incr("key", delta=20, initial=5)
        flag, cas, val = client.get("key")
        self.assertTrue(val == 5)
        self.log.info("val={0},flag={1},cas={2}".format(val, flag, cas))
        client.incr("key", delta=20, initial=5)
        flag, cas, val = client.get("key")
        self.assertTrue(val == 25)
        self.log.info("val={0},flag={1},cas={2}".format(val, flag, cas))
        client.decr("key", delta=20, initial=5)
        flag, cas, val = client.get("key")
        self.assertTrue(val == 5)
        self.log.info("val={0},flag={1},cas={2}".format(val, flag, cas))
        client.upsert("key1", "document1")
        client.upsert("key2", "document2")
        client.upsert("key3", "document3")
        results = client.get_multi(["key1", "key2"])
        self.log.info(results)
        client.upsert_multi({"key1": "{1:2}", "key2": "{3:2}"})
        results = client.get_multi(["key1", "key2"])
        self.log.info(results)
        client.touch_multi(["key1", "key2"], ttl=200)
        results = client.get_multi(["key1", "key2"])
        self.log.info(results)
        data = client.observe("key1")
        self.log.info(data)
        data = client.observe_multi(["key1", "key2"])
        self.log.info(data)
        stats = client.stats(["key1"])
        self.log.info(stats)
        client.n1ql_request(
            client.n1ql_query('create primary index on default')).execute()
        query = client.n1ql_query('select * from default')
        request = client.n1ql_request(query)
        obj = request.get_single_result()._jsobj
        self.log.info(obj)
        client.close()
Code Example #13
    def test_ephemeral_bucket_NRU_eviction_access_in_the_delete_range(self):
        """
        generate_load = BlobGenerator(EvictionKV.KEY_ROOT, 'param2', self.value_size, start=0, end=self.num_items)
        self._load_all_ephemeral_buckets_until_no_more_memory(self.servers[0], generate_load, "create", 0, self.num_items)


        # figure out how many items were loaded and load 10% more
        rest = RestConnection(self.servers[0])
        itemCount = rest.get_bucket(self.buckets[0]).stats.itemCount

        self.log.info( 'Reached OOM, the number of items is {0}'.format( itemCount))


        """

        # select some keys which we expect to be adjacent to the kvs which will be deleted
        # and how many KVs should we select, maybe that is a parameter

        itemCount = 50000
        max_delete_value = itemCount // 10
        NUM_OF_ACCESSES = 50
        keys_to_access = set()
        for i in range(NUM_OF_ACCESSES):
            keys_to_access.add(random.randint(0, max_delete_value))

        # and then do accesses on the key set
        client = SDKClient(hosts=[self.master.ip], bucket=self.buckets[0])
        for i in keys_to_access:
            # and we may want to parameterize the get at some point
            rc = client.get(EvictionKV.KEY_ROOT + str(i), no_format=True)

        # and then do puts to push other keys out
        PERCENTAGE_TO_ADD = 10
        incremental_kv_population = BlobGenerator(
            EvictionKV.KEY_ROOT,
            'param2',
            self.value_size,
            start=itemCount,
            end=itemCount * (100 + PERCENTAGE_TO_ADD) // 100)
        self._load_bucket(self.buckets[0],
                          self.master,
                          incremental_kv_population,
                          "create",
                          exp=0,
                          kv_store=1)

        # and verify that the touched kvs are still there
        for i in keys_to_access:
            # and we may want to parameterize the get at some point
            rc = client.get(EvictionKV.KEY_ROOT + str(i), no_format=True)
            self.assertFalse(
                rc is None,
                'Key {0} was incorrectly deleted'.format(EvictionKV.KEY_ROOT +
                                                         str(i)))
Code Example #14
    def async_load_data_till_upgrade_completes():
        self.log.info("Started doc operations on KV")
        client = SDKClient(hosts=[self.master.ip],
                           bucket=self.cb_bucket_name,
                           password=self.master.rest_password)
        i = 0
        while not self._STOP_INGESTION:
            client.insert_document(
                "key-id" + str(i),
                '{"name":"James_' + str(i) + '", "profession":"Pilot"}')
            i += 1
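
A plausible way this closure is driven, sketched under the assumption that the enclosing test owns the _STOP_INGESTION flag the loop polls and uses the same Thread class seen in test_add_concurrent:

    # Hypothetical usage sketch: ingest on a background thread for the
    # duration of the upgrade, then flip the flag and join.
    self._STOP_INGESTION = False
    loader = Thread(target=async_load_data_till_upgrade_completes)
    loader.start()
    # ... drive the upgrade to completion ...
    self._STOP_INGESTION = True
    loader.join()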
Code Example #15
    def async_load_data():
        self.log.info(
            "Performing doc operations on KV while the upgrade is in progress")
        client = SDKClient(hosts=[self.master.ip],
                           bucket=self.cb_bucket_name,
                           password=self.master.rest_password)
        i = 0
        while not _STOP_INGESTION:
            client.insert_document(
                "key-id" + str(i),
                '{"name":"James_' + str(i) + '", "profession":"Pilot"}')
            i += 1
Code Example #16
    def load_docs(self,
                  node,
                  num_docs,
                  bucket='default',
                  password='',
                  exp=0,
                  flags=0):
        host = node.ip + ":" + node.port
        client = SDKClient(bucket="default", hosts=[host], scheme="http")

        for i in range(num_docs):
            key = "key%s" % i
            rc = client.upsert(key, "value")
Code Example #17
    def _direct_client(self, server, bucket, timeout=30):
        # CREATE SDK CLIENT
        if self.use_sdk_client:
            try:
                from sdk_client import SDKClient
                scheme = "couchbase"
                host = self.master.ip
                if self.master.ip == "127.0.0.1":
                    scheme = "http"
                    host = "{0}:{1}".format(self.master.ip, self.master.port)
                return SDKClient(scheme=scheme, hosts=[host], bucket=bucket,
                                 password=self.master.rest_password)
            except Exception as ex:
                self.log.error("cannot load sdk client due to error {0}".format(str(ex)))
Code Example #18
    def test_xattr_compression(self):
        # MB-32669
        # subdoc.subdoc_simple_dataset.SubdocSimpleDataset.test_xattr_compression,compression=active
        mc = MemcachedClient(self.master.ip, 11210)
        mc.sasl_auth_plain(self.master.rest_username,
                           self.master.rest_password)
        mc.bucket_select('default')

        self.key = "test_xattr_compression"
        self.nesting_level = 5
        array = {'i_add': 0, 'i_sub': 1, 'a_i_a': [0, 1], 'ai_sub': [0, 1]}
        base_json = self.generate_json_for_nesting()
        nested_json = self.generate_nested(base_json, array,
                                           self.nesting_level)
        jsonDump = json.dumps(nested_json)
        stats = mc.stats()
        self.assertEquals(stats['ep_compression_mode'], 'active')

        scheme = "http"
        host = "{0}:{1}".format(self.master.ip, self.master.port)
        self.sdk_client = SDKClient(scheme=scheme,
                                    hosts=[host],
                                    bucket="default")

        self.sdk_client.set(self.key, value=jsonDump, ttl=60)
        rv = self.sdk_client.cb.mutate_in(self.key,
                                          SD.upsert('my.attr',
                                                    "value",
                                                    xattr=True,
                                                    create_parents=True),
                                          ttl=60)
        self.assertTrue(rv.success)

        # wait for it to persist and then evict the key
        persisted = 0
        while persisted == 0:
            opaque, rep_time, persist_time, persisted, cas = mc.observe(
                self.key)

        mc.evict_key(self.key)
        time.sleep(65)
        try:
            self.client.get(self.key)
            self.fail("the key should get expired")
        except mc_bin_client.MemcachedError as error:
            self.assertEquals(error.status, 1)

        stats = mc.stats()
        self.assertEquals(int(stats['curr_items']), 0)
        self.assertEquals(int(stats['curr_temp_items']), 0)
Code Example #19
File: cbas_limit_pushdown.py  Project: ritalrw/Jython
    def setUp(self):
        super(CBASLimitPushdown, self).setUp()

        self.log.info("Create a reference to SDK client")
        client = SDKClient(hosts=[self.master.ip],
                           bucket=self.cb_bucket_name,
                           password=self.master.rest_password)

        self.log.info("Insert documents in KV bucket")
        documents = [
            '{"name":"dave","age":19,"gender":"Male","salary":50.0, "married":false, "employed":""}',
            '{"name":"evan","age":25,"gender":"Female","salary":100.15, "married":true}',
            '{"name":"john","age":44,"gender":"Male","salary":150.55, "married":null}',
            '{"name": "sara", "age": 20, "gender": "Female", "salary": 200.34, "married":false}',
            '{"name":"tom","age":31,"gender":"Male","salary":250.99, "married":true}'
        ]
        client.insert_json_documents("id-", documents)

        self.log.info("Create connection")
        self.cbas_util.createConn(self.cb_bucket_name)

        self.log.info("Create primary index on KV bucket")
        self.rest.query_tool("create primary index pri_idx on default")

        self.log.info("Create dataset")
        self.cbas_util.create_dataset_on_bucket(self.cb_bucket_name,
                                                self.cbas_dataset_name)

        self.log.info("Connect to Local link")
        self.cbas_util.connect_link()

        self.log.info("Wait for ingestion to complete")
        self.total_documents = self.rest.query_tool(
            CBASLimitQueries.BUCKET_COUNT_QUERY)['results'][0]['$1']
        self.cbas_util.wait_for_ingestion_complete([self.cbas_dataset_name],
                                                   self.total_documents)

        self.log.info("Fetch partitions")
        shell = RemoteMachineShellConnection(self.cbas_node)
        response = self.cbas_util.fetch_analytics_cluster_response(shell)
        if 'partitions' in response:
            self.partitions = len(response['partitions'])
            self.log.info("Number of data partitions on cluster %d" %
                          self.partitions)
        else:
            self.fail(
                msg="Partitions not found. Failing early to avoid unexpected results")
Code Example #20
    def direct_client(self, server, bucket, timeout=30):
        # CREATE SDK CLIENT
        if self.use_sdk_client:
            self.log.info("--> Using SDK client")
            self.is_secure = TestInputSingleton.input.param("is_secure", False)
            try:
                from sdk_client import SDKClient
                self.log.info("--> Try to use SDK2 client")
                if not self.is_secure:
                    scheme = "couchbase"
                else:
                    scheme = "couchbases"
                host = self.master.ip
                if self.master.ip == "127.0.0.1":
                    if not self.is_secure:
                        scheme = "http"
                    else:
                        scheme = "https"
                    host = "{0}:{1}".format(self.master.ip, self.master.port)
                return SDKClient(scheme=scheme,
                                 hosts=[host],
                                 bucket=bucket.name,
                                 username=server.rest_username,
                                 password=server.rest_password)
            except ImportError:
                self.log.info("--> Using SDK3 client")
                from sdk_client3 import SDKClient
                return SDKClient(RestConnection(self.master),
                                 bucket=bucket.name)
            except Exception as ex:
                self.log.error(
                    "cannot load sdk client due to error {0}".format(str(ex)))

        # USE MC BIN CLIENT WHEN NOT USING SDK CLIENT
        self.log.info("--> Using Direct Memcached client")
        return self.direct_mc_bin_client(server, bucket, timeout=timeout)
Code Example #21
    def test_ephemeral_bucket_NRU_eviction(self):

        generate_load = BlobGenerator(EvictionKV.KEY_ROOT,
                                      'param2',
                                      self.value_size,
                                      start=0,
                                      end=self.num_items)
        self._load_all_ephemeral_buckets_until_no_more_memory(
            self.servers[0], generate_load, "create", 0, self.num_items)

        # figure out how many items were loaded and load 10% more
        rest = RestConnection(self.servers[0])
        itemCount = rest.get_bucket(self.buckets[0]).stats.itemCount

        self.log.info(
            'Reached OOM, the number of items is {0}'.format(itemCount))

        incremental_kv_population = BlobGenerator(EvictionKV.KEY_ROOT,
                                                  'param2',
                                                  self.value_size,
                                                  start=itemCount,
                                                  end=int(itemCount * 1.1))
        self._load_bucket(self.buckets[0],
                          self.master,
                          incremental_kv_population,
                          "create",
                          exp=0,
                          kv_store=1)

        # and then probe the keys that are left. For now print out a distribution but later apply some heuristic
        client = SDKClient(hosts=[self.master.ip], bucket=self.buckets[0])

        NUMBER_OF_CHUNKS = 11
        items_in_chunk = int(1.1 * itemCount / NUMBER_OF_CHUNKS)
        for i in range(NUMBER_OF_CHUNKS):
            keys_still_present = 0
            for j in range(items_in_chunk):
                rc = client.get(EvictionKV.KEY_ROOT +
                                str(i * items_in_chunk + j),
                                no_format=True)

                if rc[2] is not None:
                    keys_still_present = keys_still_present + 1

            self.log.info(
                'Chunk {0} has {1:.2f} percent items still present'.format(
                    i, 100 * keys_still_present /
                    (itemCount * 1.1 / NUMBER_OF_CHUNKS)))
Code Example #22
File: cbas_stats.py  Project: umang-cb/Jython
    def setUp(self):
        super(CbasStats, self).setUp()

        self.log.info("Add Json documents to default bucket")
        self.perform_doc_ops_in_all_cb_buckets(self.num_items, "create", 0,
                                               self.num_items)

        self.log.info("Create reference to SDK client")
        client = SDKClient(scheme="couchbase",
                           hosts=[self.master.ip],
                           bucket=self.cb_bucket_name,
                           password=self.master.rest_password)

        self.log.info("Insert binary data into default bucket")
        keys = ["%s" % (uuid.uuid4()) for i in range(0, self.num_items)]
        client.insert_binary_document(keys)

        self.log.info("Create connection")
        self.cbas_util.createConn(self.cb_bucket_name)

        self.log.info("Create bucket on CBAS")
        self.assertTrue(
            self.cbas_util.create_bucket_on_cbas(
                cbas_bucket_name=self.cbas_bucket_name,
                cb_bucket_name=self.cb_bucket_name,
                cb_server_ip=self.cb_server_ip),
            "bucket creation failed on cbas")

        self.log.info("Create dataset on the CBAS bucket")
        self.cbas_util.create_dataset_on_bucket(
            cbas_bucket_name=self.cb_bucket_name,
            cbas_dataset_name=self.cbas_dataset_name)

        self.log.info("Connect to Bucket")
        self.cbas_util.connect_to_bucket(
            cbas_bucket_name=self.cbas_bucket_name,
            cb_bucket_password=self.cb_bucket_password)

        statement = "select count(*) from {0} where mutated=0;".format(
            self.cbas_dataset_name)
        status, metrics, errors, results, response_handle = self.cbas_util.execute_statement_on_cbas_util(
            statement, mode=self.mode, timeout=75, analytics_timeout=120)

        if not self.cbas_util.validate_cbas_dataset_items_count(
                self.cbas_dataset_name, self.num_items):
            self.fail(
                "No. of items in CBAS dataset do not match that in the CB bucket"
            )
Code Example #23
    def test_nru_eviction_impact_on_cbas(self):

        self.log.info("Create dataset")
        self.cbas_util.create_dataset_on_bucket(self.cb_bucket_name,
                                                self.cbas_dataset_name)

        self.log.info("Connect to Local link")
        self.cbas_util.connect_link()

        self.log.info("Add documents until ram percentage")
        self.load_document_until_ram_percentage()

        self.log.info("Fetch current document count")
        bucket_helper = BucketHelper(self.master)
        item_count = bucket_helper.get_bucket(
            self.cb_bucket_name).stats.itemCount
        self.log.info("Completed base load with %s items" % item_count)

        self.log.info(
            "Fetch initial inserted 100 documents, so they are not removed")
        client = SDKClient(hosts=[self.master.ip],
                           bucket=self.cb_bucket_name,
                           password=self.master.rest_password)
        for i in range(100):
            client.get("test_docs-" + str(i))

        self.log.info("Add 20% more items to trigger NRU")
        for i in range(item_count, int(item_count * 1.2)):
            client.insert_document("key-id" + str(i), '{"name":"dave"}')

        self.log.info("Validate document count on CBAS")
        count_n1ql = self.rest.query_tool(
            'select count(*) from %s' %
            (self.cb_bucket_name))['results'][0]['$1']
        if not self.cbas_util.validate_cbas_dataset_items_count(
                self.cbas_dataset_name, count_n1ql):
            self.log.info(
                "Document count mismatch might be due to ejection of documents on KV. Retrying"
            )
            count_n1ql = self.rest.query_tool(
                'select count(*) from %s' %
                (self.cb_bucket_name))['results'][0]['$1']
            self.assertTrue(self.cbas_util.validate_cbas_dataset_items_count(
                self.cbas_dataset_name, count_n1ql),
                            msg="Count mismatch on CBAS")
Code Example #24
    def test_no_eviction_impact_on_cbas(self):

        self.log.info("Create dataset")
        self.cbas_util.create_dataset_on_bucket(self.cb_bucket_name,
                                                self.cbas_dataset_name)

        self.log.info("Connect to Local link")
        self.cbas_util.connect_link()

        self.log.info("Add documents until ram percentage")
        self.load_document_until_ram_percentage()

        self.log.info("Fetch current document count")
        bucket_helper = BucketHelper(self.master)
        item_count = bucket_helper.get_bucket(
            self.cb_bucket_name).stats.itemCount
        self.log.info("Completed base load with %s items" % item_count)

        self.log.info("Load more until we are out of memory")
        client = SDKClient(hosts=[self.master.ip],
                           bucket=self.cb_bucket_name,
                           password=self.master.rest_password)
        i = item_count
        insert_success = True
        while insert_success:
            insert_success = client.insert_document("key-id" + str(i),
                                                    '{"name":"dave"}')
            i += 1

        self.log.info('Memory is full at {0} items'.format(i))
        self.log.info("Added %s more items beyond the base load" % (i - item_count))

        self.log.info("Fetch item count")
        stats = bucket_helper.get_bucket(self.cb_bucket_name).stats
        itemCountWhenOOM = stats.itemCount
        memoryWhenOOM = stats.memUsed
        self.log.info('Item count when OOM {0} and memory used {1}'.format(
            itemCountWhenOOM, memoryWhenOOM))

        self.log.info("Validate document count on CBAS")
        count_n1ql = self.rest.query_tool(
            'select count(*) from %s' %
            (self.cb_bucket_name))['results'][0]['$1']
        self.assertTrue(self.cbas_util.validate_cbas_dataset_items_count(
            self.cbas_dataset_name, count_n1ql),
                        msg="Count mismatch on CBAS")
Code Example #25
    def test_error_response_type_mismatch_object(self):
        self.log.info("Create a reference to SDK client")
        client = SDKClient(hosts=[self.master.ip],
                           bucket=self.cb_bucket_name,
                           password=self.master.rest_password)

        self.log.info("Insert documents in KV bucket")
        documents = ['{"address":{"city":"NY"}}']
        client.insert_json_documents("id-", documents)

        self.log.info("Create dataset and connect link")
        self.create_dataset_connect_link()

        status, _, errors, _, _ = self.cbas_util.execute_statement_on_cbas_util(
            self.error_response["query"])
        self.validate_error_response(status, errors,
                                     self.error_response["msg"],
                                     self.error_response["code"])
Code Example #26
    def test_MB_36087(self):
        try:
            from sdk_client import SDKClient
        except ImportError:
            from sdk_client3 import SDKClient
        import couchbase.subdocument as SD

        g_key = "test_doc"
        bucket_name = "default"
        sdk_client = SDKClient(scheme='couchbase',
                               hosts=[self.master.ip],
                               bucket=bucket_name)
        rest = RestConnection(self.master)
        client = VBucketAwareMemcached(rest, bucket_name)
        for i in range(self.num_items):
            key = g_key + str(i)
            mcd = client.memcached(key)
            rc = mcd.set(key, 0, 0, json.dumps({'value': 'value2'}))
            sdk_client.mutate_in(
                key,
                SD.upsert("subdoc_key",
                          "subdoc_val",
                          xattr=True,
                          create_parents=True))
            # Wait for key to persist
            persisted = 0
            while persisted == 0:
                opaque, rep_time, persist_time, persisted, cas = \
                    client.observe(key)

            time.sleep(10)
            # Evict the key
            try:
                rc = mcd.evict_key(key)
            except MemcachedError as exp:
                self.fail("Exception with evict meta - %s" % exp)

            # Perform del_with_meta
            try:
                mcd = client.memcached(key)
                _, flags, exp, seqno, cas = client.memcached(key).getMeta(key)
                rc = mcd.del_with_meta(key, 0, 0, 2, cas + 1)
            except MemcachedError as exp:
                self.fail("Exception with del_with meta - {0}".format(exp))
Code Example #27
    def key_not_exists_test(self):
        client = SDKClient(hosts=[self.master.ip], bucket="default")
        KEY_NAME = 'key'

        for i in range(1500):
            client.set(KEY_NAME, "x")
            # For some reason could not get delete to work
            client.remove(KEY_NAME)
            rc = client.get(KEY_NAME)
            # .get is automatically quiet for the sdk_client, so a None value
            # indicates the key is gone; otherwise the sdk_client spends 10
            # seconds retrying the command and is very slow
            if rc[2] is not None:
                assert False
            # CAS errors do not sleep the test for 10 seconds, plus we need to
            # check that the correct error is being thrown
            try:
                # For some reason replace instead of cas would not reproduce the bug
                client.cas(KEY_NAME, "value", cas=10)
            except NotFoundError:
                pass
Code Example #28
    def test_sdk_subddoc(self):
        """
            Test SDK Client Calls
        """
        scheme = "couchbase"
        host = self.master.ip
        if self.master.ip == "127.0.0.1":
            scheme = "http"
            host = "{0}:{1}".format(self.master.ip, self.master.port)

        client = SDKClient(scheme=scheme, hosts=[host], bucket="default")
        json_document = {"1": 1, "2": 2, "array": [1]}
        document_key = "1"
        client.insert("1", json_document)
        client.insert_in(document_key, "3", 3)
        client.upsert_in(document_key, "4", 4)
        client.upsert_in(document_key, "4", "change_4")
        client.replace_in(document_key, "4", "crap_4")
        client.arrayprepend_in(document_key, "array", "0")
        client.arrayappend_in(document_key, "array", "2")
        client.arrayinsert_in(document_key, "array[1]",
                              "INSERT_VALUE_AT_INDEX_1")
        client.arrayaddunique_in(document_key, "array", "INSERT_UNIQUE_VALUE")
        self.log.info(json.dumps(client.get(document_key)))
Code Example #29
    def test_cbas_bucket_connect_with_more_than_eight_active_datasets(self):
        """
        1. Create a cb bucket
        2. Create a cbas bucket
        3. Create 9 datasets
        4. Connecting to the cbas bucket must fail with error - Maximum number of active writable datasets (8) exceeded
        5. Delete 1 dataset, now the count must be 8
        6. Re-connect the cbas bucket and this time connection must succeed
        7. Verify count in dataset post connect
        """
        self.log.info("Fetch test case arguments")
        self.fetch_test_case_arguments()

        self.log.info("Load data in the default bucket")
        self.perform_doc_ops_in_all_cb_buckets(self.num_items, "create", 0,
                                               self.num_items)

        self.log.info("Create reference to SDK client")
        client = SDKClient(scheme="couchbase",
                           hosts=[self.master.ip],
                           bucket=self.cb_bucket_name,
                           password=self.master.rest_password)

        self.log.info("Insert binary data into default bucket")
        keys = ["%s" % (uuid.uuid4()) for i in range(0, self.num_items)]
        client.insert_binary_document(keys)

        self.log.info("Insert Non-Json string data into default bucket")
        keys = ["%s" % (uuid.uuid4()) for i in range(0, self.num_items)]
        client.insert_string_document(keys)

        self.log.info("Create connection")
        self.cbas_util.createConn(self.cb_bucket_name)

        self.log.info("Create a CBAS bucket")
        self.assertTrue(self.cbas_util.create_bucket_on_cbas(
            cbas_bucket_name=self.cbas_bucket_name,
            cb_bucket_name=self.cb_bucket_name),
                        msg="Failed to create CBAS bucket")

        self.log.info("Create datasets")
        for i in range(1, self.num_of_dataset + 1):
            self.assertTrue(
                self.cbas_util.create_dataset_on_bucket(
                    cbas_bucket_name=self.cb_bucket_name,
                    cbas_dataset_name=self.dataset_prefix + str(i)),
                msg="Failed to create dataset {0}".format(self.dataset_prefix +
                                                          str(i)))

        self.log.info("Verify connect to CBAS bucket must fail")
        self.assertTrue(
            self.cbas_util.connect_to_bucket(
                cbas_bucket_name=self.cbas_bucket_name,
                cb_bucket_password=self.cb_bucket_password,
                validate_error_msg=True,
                expected_error=BucketOperations.CBAS_BUCKET_CONNECT_ERROR_MSG),
            msg="Incorrect error msg while connecting to cbas bucket")

        self.log.info("Drop the last dataset created")
        self.assertTrue(
            self.cbas_util.drop_dataset(cbas_dataset_name=self.dataset_prefix +
                                        str(self.num_of_dataset)),
            msg="Failed to drop dataset {0}".format(self.dataset_prefix +
                                                    str(self.num_of_dataset)))

        self.log.info("Connect to CBAS bucket")
        self.assertTrue(self.cbas_util.connect_to_bucket(
            cbas_bucket_name=self.cbas_bucket_name),
                        msg="Failed to connect to cbas bucket")

        self.log.info("Wait for ingestion to complete and validate count")
        for i in range(1, self.num_of_dataset):
            self.cbas_util.wait_for_ingestion_complete(
                [self.dataset_prefix + str(i)], self.num_items)
            self.assertTrue(
                self.cbas_util.validate_cbas_dataset_items_count(
                    self.dataset_prefix + str(i), self.num_items))
Code Example #30
 def run(self, *args, **kw):
     cb = SDKClient(scheme="couchbase", hosts=[SERVER_IP],
       bucket='default').cb
     for x in range(ITERATIONS):
         cb.mutate_in(DOCID, SD.array_append('recs', 1))