Example 1
 def create_default_views(self, is_one_ddoc=False):
     views = [View(self.testcase.default_view_name + "0",
                   'function (doc) {emit(doc.geometry, doc.age);}',
                   dev_view=self.testcase.use_dev_views, is_spatial=True),
             View(self.testcase.default_view_name + "1",
                   'function (doc) {emit(doc.geometry, null);}',
                   dev_view=self.testcase.use_dev_views, is_spatial=True),
             View(self.testcase.default_view_name + "2",
                   'function (doc) {emit(doc.geometry, doc.name);}',
                   dev_view=self.testcase.use_dev_views, is_spatial=True),
             View(self.testcase.default_view_name + "3",
                   'function (doc) {emit(doc.geometry, [doc.name, doc.age]);}',
                   dev_view=self.testcase.use_dev_views, is_spatial=True),
             View(self.testcase.default_view_name + "4",
                   'function (doc) {emit(doc.geometry, {result : {age:doc.age}});}',
                   dev_view=self.testcase.use_dev_views, is_spatial=True)]
     ddocs = []
     if is_one_ddoc:
         ddocs.append(DesignDocument(self.testcase.default_ddoc_name, [],
                                     spatial_views=views))
     else:
         for i in range(5):
             ddocs.append(DesignDocument(self.testcase.default_ddoc_name + str(i), [],
                                         spatial_views=[views[i]]))
     for ddoc in ddocs:
         for view in ddoc.spatial_views:
             self.testcase.cluster.create_view(self.testcase.master, ddoc.name, view,
                                               bucket=self.testcase.bucket_name)
     return ddocs
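A hedged usage sketch for Example 1: build all five spatial views in a single design document, then run a bounding-box query against the first view. Here `helper` stands for whatever object defines create_default_views (it holds the testcase reference used above); the bbox value and the query_view signature follow the conventions of the other snippets in this collection rather than a confirmed API.

    # Sketch only; `helper` and the query parameters are assumptions.
    ddocs = helper.create_default_views(is_one_ddoc=True)
    query = {"bbox": "-180,-90,180,90", "stale": "false"}
    helper.testcase.cluster.query_view(
        helper.testcase.master, ddocs[0].name,
        ddocs[0].spatial_views[0].name, query,
        bucket=helper.testcase.bucket_name)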
Example 2
 def test_add_spatial_views_case_sensative(self):
     ddoc = DesignDocument(self.default_ddoc_name, [], spatial_views=[
                               View(self.default_view_name, self.default_map,
                                    dev_view=self.use_dev_views, is_spatial=True),
                               View(self.default_view_name.upper(), self.default_map,
                                    dev_view=self.use_dev_views, is_spatial=True)])
     self.create_ddocs([ddoc])
Example 3
 def create_ddocs(self, is_dev_view):
     mapview = View(self.map_view_name,
                    '''function(doc) {
          emit(doc.integer, doc.string);
       }''',
                    dev_view=is_dev_view)
     self.cluster.create_view(self.master, 'test', mapview)
     redview = View(self.red_view_name,
                    '''function(doc) {
          emit([doc.integer, doc.string], doc.integer);
       }''',
                    '''_count''',
                    dev_view=is_dev_view)
     self.cluster.create_view(self.master, 'test', redview)
     redview_stats = View(self.red_view_stats_name,
                          '''function(doc) {
          emit(doc.string, doc.string);
       }''',
                          '''_stats''',
                          dev_view=is_dev_view)
     self.cluster.create_view(self.master, 'test2', redview_stats)
     # The keys view is there to test the `keys` query parameter. In order
     # to reproduce the ordering bug (MB-16618) there must be more than
     # one document with the same key, hence modulo is used
     modulo = self.num_docs // 3  # integer division so the emitted key stays an int
     keysview = View(self.keys_view_name,
                     '''function(doc) {
          emit(doc.integer % ''' + str(modulo) + ''', doc.string);
       }''',
                     '_count',
                     dev_view=is_dev_view)
     self.cluster.create_view(self.master, 'test3', keysview)
     RebalanceHelper.wait_for_persistence(self.master, self.bucket, 0)
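Because the keys view emits doc.integer % modulo, several documents share each key, which is exactly what exercising the `keys` query parameter (and reproducing the MB-16618 ordering bug) needs. A minimal sketch of such a query, following the query-dict convention used throughout these examples; the JSON-encoded "keys" value is the standard Couchbase view parameter, while the commented-out call is an assumption about the framework signature.

    import json

    # Request two specific keys from the keys view; each key matches
    # several documents because of the modulo in the map function above.
    query = {"stale": "false", "keys": json.dumps([0, 1])}
    # self.cluster.query_view(self.master, 'test3', self.keys_view_name, query)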
Example 4
 def make_ddocs(self, ddocs_num, views_per_ddoc, non_spatial_views_per_ddoc):
     ddocs = []
     for i in range(ddocs_num):
         views = []
         for k in range(views_per_ddoc):
             views.append(View(self.default_view_name + str(k), self.default_map,
                               dev_view=self.use_dev_views, is_spatial=True))
         non_spatial_views = []
         if non_spatial_views_per_ddoc:
             for k in range(non_spatial_views_per_ddoc):
                 non_spatial_views.append(
                     View(self.default_view_name + str(k),
                          'function (doc) { emit(null, doc);}',
                          dev_view=self.use_dev_views))
         ddocs.append(DesignDocument(self.default_ddoc_name + str(i), non_spatial_views, spatial_views=views))
     return ddocs
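A short sketch of how make_ddocs pairs with the create step from Example 1, as it might appear inside a test method; the cluster, master, and bucket_name attributes are assumptions carried over from the neighbouring snippets.

    # Sketch: two ddocs, each with three spatial and one non-spatial view.
    ddocs = self.make_ddocs(ddocs_num=2, views_per_ddoc=3,
                            non_spatial_views_per_ddoc=1)
    for ddoc in ddocs:
        for view in ddoc.spatial_views:
            self.cluster.create_view(self.master, ddoc.name, view,
                                     bucket=self.bucket_name)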
Example 5
 def setUp(self):
     super(RebalanceBaseTest, self).setUp()
     self.doc_ops = self.input.param("doc_ops", "create")
     self.doc_size = self.input.param("doc_size", 10)
     self.key_size = self.input.param("key_size", 0)
     self.zone = self.input.param("zone", 1)
     self.default_view_name = "default_view"
     self.default_map_func = "function (doc) {\n  emit(doc._id, doc);\n}"
     self.default_view = View(self.default_view_name, self.default_map_func,
                              None)
     self.max_verify = self.input.param("max_verify", None)
     self.std_vbucket_dist = self.input.param("std_vbucket_dist", None)
     self.key = 'test_docs'.rjust(self.key_size, '0')
     nodes_init = self.cluster.servers[
         1:self.nodes_init] if self.nodes_init != 1 else []
     self.task.rebalance([self.cluster.master], nodes_init, [])
     self.cluster.nodes_in_cluster.extend([self.cluster.master] +
                                          nodes_init)
     self.bucket_util.create_default_bucket(replica=self.num_replicas)
     self.bucket_util.add_rbac_user()
     self.sleep(10)
     self.gen_load = self.get_doc_generator(0, self.num_items)
     # gen_update is used to mutate the first half of the uploaded data
     self.gen_update = self.get_doc_generator(0, (self.num_items // 2 - 1))
     self.log.info("==========Finished rebalance base setup========")
Example 6
def perform_view_tasks(viewMsgList):
    rest = create_rest()

    if isinstance(viewMsgList, dict):
        viewMsgList = [viewMsgList]

    for viewMsg in viewMsgList:
        if "create" in viewMsg:
            ddocMsg = parseDdocMsg(viewMsg['create'])
            for ddoc_name, views in ddocMsg.items():
                view_list = []
                bucket_name = ''
                for view in views:
                    view_list.append(View(view['view_name'], view['map_func'], view['red_func'],
                                          view['dev_view'], view['is_spatial']))
                    bucket_name = view['bucket_name']

                bucket_obj = rest.get_bucket(bucket_name, 2, 2)
                rest.create_ddoc(ddoc_name, bucket_obj, view_list)

        if "delete" in viewMsg:
            for view in viewMsg['delete']:
                viewMsgParsed = parseViewMsg(view)
                bucket_obj = rest.get_bucket(viewMsgParsed['bucket_name'], 2, 2)
                rest.delete_view(bucket_obj, viewMsgParsed['ddoc_name'])
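For reference, the structure the create branch iterates over: parseDdocMsg returns a mapping from design-document name to a list of view descriptors. The field names below are exactly the keys read by the loop above; the concrete values are invented for illustration.

    # Illustrative parseDdocMsg result (field names from the code above,
    # values made up for the example).
    ddoc_msg = {
        "ddoc1": [
            {"view_name": "by_id",
             "map_func": "function (doc) { emit(doc._id, null); }",
             "red_func": None,
             "dev_view": False,
             "is_spatial": False,
             "bucket_name": "default"},
        ],
    }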
Example 7
 def setUp(self):
     self.ddoc_name = "tuq_ddoc"
     super(QueriesViewsTests, self).setUp()
     self.map_fn = 'function (doc){emit([doc.join_yr, doc.join_mo],doc.name);}'
     self.view_name = "tuq_view"
     self.default_view = View(self.view_name, self.map_fn, None, False)
     self.query_buckets = self.get_query_buckets(check_all_buckets=True)
Example 8
    def test_add_single_spatial_view(self):
        name_lenght = self.input.param('name_lenght', None)
        view_name = self.input.param('view_name', self.default_view_name)
        if name_lenght:
            view_name = ''.join(
                random.choice(string.ascii_lowercase)
                for x in range(name_lenght))
        not_compilable = self.input.param('not_compilable', False)
        error = self.input.param('error', None)
        map_fn = (
            self.default_map,
            'function (doc) {emit(doc.geometry, doc.age);')[not_compilable]

        ddoc = DesignDocument(self.default_ddoc_name, [],
                              spatial_views=[
                                  View(view_name,
                                       map_fn,
                                       dev_view=self.use_dev_views,
                                       is_spatial=True)
                              ])
        try:
            self.create_ddocs([ddoc])
        except Exception as ex:
            if error and str(ex).find(error) != -1:
                self.log.info("Error caught as expected %s" % error)
                return
            else:
                self.fail("Unexpected error appeared during run %s" % ex)
        if error:
            self.fail("Expected error '%s' didn't appear" % error)
Example 9
    def test_view_cbcollectinfo_with_collection(self):
        self.test_cbworkloadgen_with_collection()
        self.default_design_doc_name = "Doc1"
        self.view_name = self.input.param("view_name", "View")
        self.generate_map_reduce_error = self.input.param(
            "map_reduce_error", False)
        self.default_map_func = 'function (doc) { emit(doc.age, doc.first_name);}'
        self.reduce_fn = "_count"
        expected_num_items = self.num_items
        if self.generate_map_reduce_error:
            self.reduce_fn = "_sum"
            expected_num_items = None

        view = View(self.view_name,
                    self.default_map_func,
                    self.reduce_fn,
                    dev_view=False)
        self.cluster.create_view(self.master, self.default_design_doc_name,
                                 view, 'default', self.wait_timeout * 2)
        query = {"stale": "false", "connection_timeout": 60000}
        try:
            self.cluster.query_view(self.master,
                                    self.default_design_doc_name,
                                    self.view_name,
                                    query,
                                    expected_num_items,
                                    'default',
                                    timeout=self.wait_timeout)
        except Exception as ex:
            if not self.generate_map_reduce_error:
                raise ex
        self.shell.execute_cbcollect_info("%s.zip" % (self.log_filename))
        self.verify_results(self.log_filename)
Example 10
 def test_add_spatial_views_threads(self):
     same_names = self.input.param('same-name', False)
     num_views_per_ddoc = 10
     create_threads = []
     ddocs = []
     for i in range(num_views_per_ddoc):
         ddoc = DesignDocument(self.default_ddoc_name + str(i), [],
                               spatial_views=[
                                   View(self.default_view_name +
                                        (str(i), "")[same_names],
                                        self.default_map,
                                        dev_view=self.use_dev_views,
                                        is_spatial=True)
                               ])
         ddocs.append(ddoc)
     if self.ddoc_op == 'update' or self.ddoc_op == 'delete':
         self.create_ddocs(ddocs)
     for i, ddoc in enumerate(ddocs):
         create_thread = Thread(target=self.perform_ddoc_ops,
                                name="ops_thread" + str(i),
                                args=([ddoc], ))
         create_threads.append(create_thread)
         create_thread.start()
     for create_thread in create_threads:
         create_thread.join()
     if self.thread_crashed.is_set():
         self.fail("Error occured during run")
Example 11
 def test_rename_with_index(self):
     self.default_map_func = "function (doc) {\n  emit(doc._id, doc);\n}"
     self.default_view_name = "default_view"
     self.default_view = View(self.default_view_name, self.default_map_func,
                              None)
     num_views = self.input.param('num_views', 1)
     ddoc_name = views_prefix = 'hostname_mgmt'
     query = {'stale': 'false'}
     views = []
     for bucket in self.buckets:
         view = self.make_default_views(views_prefix,
                                        num_views,
                                        different_map=True)
         self.create_views(self.master, ddoc_name, view, bucket)
         views += view
     for bucket in self.buckets:
         for view in views:
             self.cluster.query_view(self.master, ddoc_name, view.name,
                                     query)
     hostnames = self.rename_nodes(self.servers[:self.nodes_init])
     self.verify_referenced_by_names(self.servers[:self.nodes_init],
                                     hostnames)
     self._set_hostames_to_servers_objs(hostnames)
     query = {'stale': 'ok'}
     for server in self.servers[:self.nodes_init]:
         for view in views:
             self.cluster.query_view(server, ddoc_name, view.name, query)
Example 12
    def pass_encrypted_in_logs_test(self):
        self.bucket_size = self._get_bucket_size(self.quota, 1)
        self._create_sasl_buckets(self.master, 1, password='******')
        bucket = self.buckets[-1]

        if self.input.param("load", 0):
            self.num_items = self.input.param("load", 0)
            self._load_doc_data_all_buckets()
        if self.input.param("views", 0):
            views = []
            for i in range(self.input.param("views", 0)):
                views.append(View("view_sasl" + str(i),
                                  'function (doc, meta) {'
                                  'emit(meta.id, "emitted_value%s");}' % str(i),
                                  None, False))
            self.create_views(self.master, "ddoc", views, bucket)
        if self.input.param("rebalance", 0):
            self.cluster.rebalance(self.servers[:self.nodes_init],
                                   self.servers[self.nodes_init:self.nodes_init + self.input.param("rebalance", 0)],
                                   [])

        for server in self.servers[:self.nodes_init]:
            for log_file in ['debug', 'info', 'views', 'xdcr']:
                self.assertFalse(RemoteUtilHelper.is_text_present_in_logs(server, bucket.saslPassword, logs_to_check=log_file),
                                 "%s logs contains password in plain text" % log_file)
Example 13
    def collectinfo_test_for_views(self):
        self.default_design_doc_name = "Doc1"
        self.view_name = self.input.param("view_name", "View")
        self.generate_map_reduce_error = self.input.param(
            "map_reduce_error", False)
        self.default_map_func = 'function (doc) { emit(doc.age, doc.first_name);}'
        self.gen_load = BlobGenerator('couch',
                                      'cb-',
                                      self.value_size,
                                      end=self.num_items)
        self._load_all_buckets(self.master, self.gen_load, "create", 0)
        self.reduce_fn = "_count"
        expected_num_items = self.num_items
        if self.generate_map_reduce_error:
            self.reduce_fn = "_sum"
            expected_num_items = None

        view = View(self.view_name,
                    self.default_map_func,
                    self.reduce_fn,
                    dev_view=False)
        self.cluster.create_view(self.master, self.default_design_doc_name,
                                 view, 'default', self.wait_timeout * 2)
        query = {"stale": "false", "connection_timeout": 60000}
        try:
            self.cluster.query_view(self.master,
                                    self.default_design_doc_name,
                                    self.view_name,
                                    query,
                                    expected_num_items,
                                    'default',
                                    timeout=self.wait_timeout)
        except Exception as ex:
            if not self.generate_map_reduce_error:
                raise ex
Example 14
    def test_views_during_ddoc_compaction(self):
        fragmentation_value = self.input.param("fragmentation_value", 80)
        ddoc_to_compact = DesignDocument(
            "ddoc_to_compact", [],
            spatial_views=[
                View(self.default_view_name,
                     'function (doc) { emit(doc.age, doc.name);}',
                     dev_view=self.use_dev_views)
            ])
        ddocs = self.make_ddocs(self.num_ddoc, self.views_per_ddoc, 0)
        self.disable_compaction()
        self.create_ddocs([
            ddoc_to_compact,
        ])
        fragmentation_monitor = self.cluster.async_monitor_view_fragmentation(
            self.master, ddoc_to_compact.name, fragmentation_value,
            self.default_bucket_name)
        end_time = time.time() + self.wait_timeout * 30
        while (fragmentation_monitor.state != "FINISHED"
               and end_time > time.time()):
            self.helper.insert_docs(self.num_items, 'spatial-doc')

        if (end_time < time.time()
                and fragmentation_monitor.state != "FINISHED"):
            self.fail("impossible to reach fragmentation value after %s sec" %
                      (self.wait_timeout * 30))
        fragmentation_monitor.result()
        compaction_task = self.cluster.async_compact_view(
            self.master, ddoc_to_compact.name, self.default_bucket_name)
        self.perform_ddoc_ops(ddocs)
        result = compaction_task.result(self.wait_timeout * 10)
        self.assertTrue(
            result, "Compaction didn't finish correctly. Please check diags")
Example 15
 def setUp(self):
     try:
         super(CreateDeleteViewTests, self).setUp()
         self.bucket_ddoc_map = {}
         self.ddoc_ops = self.input.param("ddoc_ops", None)
         self.boot_op = self.input.param("boot_op", None)
         self.nodes_in = self.input.param("nodes_in", 1)
         self.nodes_out = self.input.param("nodes_out", 1)
         self.test_with_view = self.input.param("test_with_view", False)
         self.num_views_per_ddoc = self.input.param("num_views_per_ddoc", 1)
         self.num_ddocs = self.input.param("num_ddocs", 1)
         self.gen = None
         self.default_design_doc_name = "Doc1"
         self.default_map_func = 'function (doc) { emit(doc.age, doc.first_name);}'
         self.updated_map_func = 'function (doc) { emit(null, doc);}'
         self.default_view = View("View", self.default_map_func, None,
                                  False)
         self.fragmentation_value = self.input.param(
             "fragmentation_value", 80)
         self.nodes_init = self.input.param("nodes_init", 1)
         self.nodes_in = self.input.param("nodes_in", 1)
         self.nodes_out = self.input.param("nodes_out", 1)
         self.wait_timeout = self.input.param("wait_timeout", 60)
         nodes_init = self.cluster.servers[
             1:self.nodes_init] if self.nodes_init != 1 else []
         self.task.rebalance([self.cluster.master], nodes_init, [])
         self.cluster.nodes_in_cluster.append(self.cluster.master)
         self.bucket_util.create_default_bucket()
         self.bucket_util.add_rbac_user()
     except Exception as ex:
         self.input.test_params["stop-on-failure"] = True
         self.log.error("SETUP WAS FAILED. ALL TESTS WILL BE SKIPPED")
         self.fail(ex)
Example 16
 def test_add_views_to_1_ddoc(self):
     same_names = self.input.param('same-name', False)
     error = self.input.param('error', None)
     num_views_per_ddoc = 10
     create_threads = []
     try:
         for i in range(num_views_per_ddoc):
             ddoc = DesignDocument(self.default_ddoc_name, [],
                                   spatial_views=[
                                       View(self.default_view_name +
                                            (str(i), "")[same_names],
                                            self.default_map,
                                            dev_view=self.use_dev_views,
                                            is_spatial=True)
                                   ])
             create_thread = Thread(target=self.create_ddocs,
                                    name="create_thread" + str(i),
                                    args=([ddoc], ))
             create_threads.append(create_thread)
             create_thread.start()
         for create_thread in create_threads:
             create_thread.join()
     except Exception as ex:
         if error and str(ex).find(error) != -1:
             self.log.info("Error caught as expected %s" % error)
             return
         else:
             self.fail("Unexpected error appeared during run %s" % ex)
     if error:
         self.fail("Expected error '%s' didn't appear" % error)
Example 17
 def _create_views(self):
     default_map_func = "function (doc) {\n  emit(doc._id, doc);\n}"
     default_view_name = "test"
     default_ddoc_name = "ddoc_test"
     prefix = "dev_"
     query = {"full_set": "true", "stale": "false", "connection_timeout": 60000}
     view = View(default_view_name, default_map_func)
     task = self.cluster.async_create_view(self.master, default_ddoc_name, view, "default")
     task.result()
Example 18
 def setUp(self):
     super(RebalanceBaseTest, self).setUp()
     self.doc_ops = self.input.param("doc_ops", "create")
     self.doc_size = self.input.param("doc_size", 10)
     self.key_size = self.input.param("key_size", 0)
     self.zone = self.input.param("zone", 1)
     self.new_replica = self.input.param("new_replica", None)
     self.default_view_name = "default_view"
     self.default_map_func = "function (doc) {\n  emit(doc._id, doc);\n}"
     self.default_view = View(self.default_view_name, self.default_map_func,
                              None)
     self.max_verify = self.input.param("max_verify", None)
     self.std_vbucket_dist = self.input.param("std_vbucket_dist", None)
     self.key = 'test_docs'.rjust(self.key_size, '0')
     nodes_init = self.cluster.servers[
         1:self.nodes_init] if self.nodes_init != 1 else []
     self.task.rebalance([self.cluster.master], nodes_init, [])
     self.cluster.nodes_in_cluster.extend([self.cluster.master] +
                                          nodes_init)
     self.bucket_util.create_default_bucket(replica=self.num_replicas)
     self.bucket_util.add_rbac_user()
     self.sleep(10)
     gen_create = self.get_doc_generator(0, self.num_items)
     self.print_cluster_stat_task = \
         self.cluster_util.async_print_cluster_stats()
     for bucket in self.bucket_util.buckets:
         task = self.task.async_load_gen_docs(
             self.cluster,
             bucket,
             gen_create,
             "create",
             0,
             persist_to=self.persist_to,
             replicate_to=self.replicate_to,
             batch_size=10,
             timeout_secs=self.sdk_timeout,
             process_concurrency=8,
             retries=self.sdk_retries,
             durability=self.durability_level)
         self.task.jython_task_manager.get_task_result(task)
         self.sleep(20)
         current_item = self.bucket_util.get_bucket_current_item_count(
             self.cluster, bucket)
         self.num_items = current_item
         self.log.info("Inserted {} number of items after loadgen".format(
             self.num_items))
     self.gen_load = self.get_doc_generator(0, self.num_items)
     # gen_update is used to mutate the first half of the uploaded data
     self.gen_update = self.get_doc_generator(0, (self.num_items // 2))
     self.durability_helper = DurabilityHelper(
         self.log,
         len(self.cluster.nodes_in_cluster),
         durability=self.durability_level,
         replicate_to=self.replicate_to,
         persist_to=self.persist_to)
     self.log.info("==========Finished rebalance base setup========")
Example 19
    def _verify_with_views(self, expected_rows):

        for bucket in self.buckets:
            default_map_func = 'function (doc, meta) { emit(meta.id, null);}'
            default_view = View("View", default_map_func, None, False)
            ddoc_name = "key_ddoc"

            self.create_views(self.cluster.master, ddoc_name, [default_view],
                              bucket.name)
            query = {"stale": "false", "connection_timeout": 60000}
            self.cluster.query_view(self.cluster.master, ddoc_name,
                                    default_view.name, query,
                                    expected_rows, bucket=bucket.name)
Example 20
 def create_ddocs(self, is_dev_view):
     mapview = View(self.map_view_name,
                    '''function(doc) {
          emit(doc.integer, doc.string);
       }''',
                    dev_view=is_dev_view)
     self.cluster.create_view(self.master, 'test', mapview)
     redview = View(self.red_view_name,
                    '''function(doc) {
          emit([doc.integer, doc.string], doc.integer);
       }''',
                    '''_count''',
                    dev_view=is_dev_view)
     self.cluster.create_view(self.master, 'test', redview)
     redview_stats = View(self.red_view_stats_name,
                          '''function(doc) {
          emit(doc.string, doc.string);
       }''',
                          '''_stats''',
                          dev_view=is_dev_view)
     self.cluster.create_view(self.master, 'test2', redview_stats)
     RebalanceHelper.wait_for_persistence(self.master, self.bucket, 0)
Example 21
    def setUp(self):
        super(RebalanceBaseTest, self).setUp()
        self.value_size = self.input.param("value_size", 256)
        self.doc_ops = self.input.param("doc_ops", None)
        self.withMutationOps = self.input.param("withMutationOps", False)
        self.total_vbuckets = self.input.param("total_vbuckets", 1024)
        if self.doc_ops is not None:
            self.doc_ops = self.doc_ops.split(":")
        self.default_map_func = "function (doc) {\n  emit(doc._id, doc);\n}"
        self.default_view_name = "default_view"
        self.default_view = View(self.default_view_name, self.default_map_func,
                                 None)
        self.std_vbucket_dist = self.input.param("std_vbucket_dist", None)
        self.zone = self.input.param("zone", 1)
        # define the data that will be used to test
        self.blob_generator = self.input.param("blob_generator", True)

        # To validate MB-34173
        self.sleep_before_rebalance = \
            self.input.param("sleep_before_rebalance", None)
        self.flusher_total_batch_limit = \
            self.input.param("flusher_total_batch_limit", None)

        if self.flusher_total_batch_limit:
            self.set_flusher_total_batch_limit(self.flusher_total_batch_limit,
                                               self.buckets)

        if self.blob_generator:
            # gen_load is used to create initial docs (1000 items by default)
            self.gen_load = BlobGenerator('mike',
                                          'mike-',
                                          self.value_size,
                                          end=self.num_items)
            # gen_update is used to mutate the first half of the uploaded data
            self.gen_update = BlobGenerator('mike',
                                            'mike-',
                                            self.value_size,
                                            end=(self.num_items // 2 - 1))
            # upload data before each test
            self._load_all_buckets(self.servers[0],
                                   self.gen_load,
                                   "create",
                                   0,
                                   flag=2,
                                   batch_size=20000)
        else:
            self._load_doc_data_all_buckets(batch_size=20000)

        # Validate seq_no snap_start/stop values with initial load
        self.check_snap_start_corruption()
Example 22
    def test_ephemeral_bucket_views(self):
        default_map_func = "function (doc, meta) {emit(meta.id, null);}"
        default_view_name = ("xattr", "default_view")[False]
        view = View(default_view_name, default_map_func, None, False)

        ddoc_name = "ddoc1"
        tasks = self.async_create_views(self.master, ddoc_name, [view], self.buckets[0].name)
        for task in tasks:
            try:
                task.result()
                self.fail("Views not allowed for ephemeral buckets")
            except DesignDocCreationException as e:
                self.assertEqual(e._message,
                                  'Error occured design document _design/ddoc1: {"error":"not_found","reason":"views are supported only on couchbase buckets"}\n')
Example 23
 def setUp(self):
     super(CCCP, self).setUp()
     self.map_fn = 'function (doc){emit([doc.join_yr, doc.join_mo],doc.name);}'
     self.ddoc_name = "cccp_ddoc"
     self.view_name = "cccp_view"
     self.default_view = View(self.view_name, self.map_fn, None, False)
     self.ops = self.input.param("ops", None)
     self.clients = {}
     try:
         for bucket in self.buckets:
             self.clients[bucket.name] =\
               MemcachedClientHelper.direct_client(self.master, bucket.name)
     except Exception:
         self.tearDown()
Example 24
 def _create_views(self, ddocs_num, buckets, views_num, server):
     ddocs = []
     if ddocs_num:
         self.default_view = View(self.default_view_name, None, None)
         for bucket in buckets:
         for i in range(ddocs_num):
                 views = self.make_default_views(self.default_view_name, views_num,
                                                 self.is_dev_ddoc, different_map=True)
                 ddoc = DesignDocument(self.default_view_name + str(i), views)
                 bucket_server = self._get_bucket(bucket, server)
                 tasks = self.async_create_views(server, ddoc.name, views, bucket=bucket_server)
                 for task in tasks:
                     task.result(timeout=90)
                 ddocs.append(ddoc)
     return ddocs
Example 25
    def cluster_bucket_views_read(self, username, password, host, port=8091,
                                  servers=None, cluster=None, httpCode=None,
                                  user_role=None):

        cluster.create_view(host, "Doc1",
                            View('abcd', 'function (doc) { emit(doc.age, doc.first_name);}', None),
                            'default', 180, with_query=False)
        doc_id = "_design/dev_Doc1"

        _cluster_bucket_views_read = {
            "getview": "pools/default/buckets/<bucket_name>/ddocs;GET"
        }

        for perm in _cluster_bucket_views_read:
            temp = _cluster_bucket_views_read[perm]
            temp = temp.replace('<bucket_name>', 'default')
            _cluster_bucket_views_read[perm] = temp

        result = self._return_http_code(_cluster_bucket_views_read, username,
                                        password, host=host, port=8092,
                                        httpCode=httpCode, user_role=user_role)
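A runnable sketch of the placeholder substitution performed above, written as a dict comprehension instead of rewriting the mapping in place; the printed URL is what _return_http_code then issues against the views port (8092).

    # Same substitution as the loop above, as a comprehension.
    perms = {"getview": "pools/default/buckets/<bucket_name>/ddocs;GET"}
    perms = {k: v.replace('<bucket_name>', 'default')
             for k, v in perms.items()}
    print(perms["getview"])  # pools/default/buckets/default/ddocs;GET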
Example 26
 def create_ddocs_and_views(self):
     self.default_view = View(self.default_view_name, None, None)
     for bucket in self.buckets:
         for i in range(int(self.ddocs_num)):
             views = self.make_default_views(self.default_view_name,
                                             self.view_num,
                                             self.is_dev_ddoc,
                                             different_map=True)
             ddoc = DesignDocument(self.default_view_name + str(i), views)
             self.ddocs.append(ddoc)
             for view in views:
                 self.cluster.create_view(self.master,
                                          ddoc.name,
                                          view,
                                          bucket=bucket)
Example 27
    def cluster_bucket_views_compact(self, username, password, host, port=8091,
                                     servers=None, cluster=None, httpCode=None,
                                     user_role=None):

        cluster.create_view(host, "Doc1",
                            View('abcd', 'function (doc) { emit(doc.age, doc.first_name);}', None),
                            'default', 180, with_query=False)
        doc_id = "_design%2Fdev_Doc1"

        _cluster_bucket_views_compact = {
            "compact_view": "pools/default/buckets/<bucket_name>/ddocs/<doc_id>/controller/compactView;POST",
            "cancel_compact": "pools/default/buckets/<bucket_name>/ddocs/<doc_id>/controller/cancelViewCompaction;POST"
        }

        for perm in _cluster_bucket_views_compact:
            temp = _cluster_bucket_views_compact[perm]
            temp = temp.replace('<bucket_name>', 'default')
            temp = temp.replace('<doc_id>', doc_id)
            _cluster_bucket_views_compact[perm] = temp

        result = self._return_http_code(_cluster_bucket_views_compact, username,
                                        password, host=host, port=8092,
                                        httpCode=httpCode, user_role=user_role)
Example 28
    def setUp(self):
        super(ObserveXdcrTest, self).setUp()
        self.src_cluster = self.get_cb_cluster_by_name('C1')
        self.src_master = self.src_cluster.get_master_node()
        self.dest_cluster = self.get_cb_cluster_by_name('C2')
        self.dest_master = self.dest_cluster.get_master_node()

        # Variables defined for _run_observe() in observetest.
        self.observe_with = self._input.param("observe_with", "")
        self.default_map_func = 'function (doc) { emit(doc.age, doc.first_name);}'
        self.default_design_doc = "Doc1"
        map_func = 'function (doc) { emit(null, doc);}'
        self.default_view = View("default_view", map_func, None)
        self.mutate_by = self._input.param("mutate_by", "set")
        self.cluster = Cluster()
        self.wait_timeout = self._wait_timeout
        self.num_items = self._num_items
Example 29
 def setUp(self):
     super(CompactionViewTests, self).setUp()
     self.value_size = self.input.param("value_size", 256)
     self.fragmentation_value = self.input.param("fragmentation_value", 80)
     self.ddocs_num = self.input.param("ddocs_num", 1)
     self.view_per_ddoc = self.input.param("view_per_ddoc", 2)
     self.use_dev_views = self.input.param("use_dev_views", False)
     self.default_map_func = "function (doc) {\n  emit(doc._id, doc);\n}"
     self.default_view_name = "default_view"
     self.default_view = View(self.default_view_name, self.default_map_func,
                              None)
     self.ddocs = []
     self.gen_load = BlobGenerator('test_view_compaction',
                                   'test_view_compaction-',
                                   self.value_size,
                                   end=self.num_items)
     self.thread_crashed = Event()
     self.thread_stopped = Event()
Example 30
 def setUp(self):
     super(XDCRViewTests, self).setUp()
     self.bucket_ddoc_map = {}
     self.ddoc_ops = self._input.param("ddoc_ops", None)
     self.ddoc_ops_dest = self._input.param("ddoc_ops_dest", None)
     self.stale_param = self._input.param("stale_param", "false")
     self.num_views_per_ddoc = self._input.param("num_views_per_ddoc", 1)
     self.num_ddocs = self._input.param("num_ddocs", 1)
     self.test_with_view = True
     self.updated_map_func = 'function (doc) { emit(null, doc);}'
     self.src_cluster = self.get_cb_cluster_by_name('C1')
     self.src_master = self.src_cluster.get_master_node()
     self.dest_cluster = self.get_cb_cluster_by_name('C2')
     self.dest_master = self.dest_cluster.get_master_node()
     self.default_map_func = "function (doc) {\n  emit(doc._id, doc);\n}"
     self.default_view_name = "default_view"
     self.default_view = View(self.default_view_name, self.default_map_func, None)
     self.cluster = self.src_cluster.get_cluster()