Example #1
    def test_create_delete_similar_views(self):
        ddoc_name_prefix = self.input.param("ddoc_name_prefix", "ddoc")
        view_name = self.input.param("view_name", "test_view")
        map_fn = 'function (doc) {if(doc.age !== undefined) { emit(doc.age, doc.name);}}'
        rest = RestConnection(self.servers[0])
        ddocs = [DesignDocument(ddoc_name_prefix + "1", [View(view_name, map_fn,
                                                             dev_view=False)],
                                options={"updateMinChanges":0, "replicaUpdateMinChanges":0}),
                DesignDocument(ddoc_name_prefix + "2", [View(view_name, map_fn,
                                                            dev_view=True)],
                               options={"updateMinChanges":0, "replicaUpdateMinChanges":0})]

        ViewBaseTests._load_docs(self, self.num_docs, "test_")
        for ddoc in ddocs:
            results = self.create_ddoc(rest, 'default', ddoc)

        cluster = Cluster()
        try:
            cluster.delete_view(self.servers[0], ddocs[1].name, ddocs[1].views[0])
        finally:
            cluster.shutdown()

        results_new = rest.query_view(ddocs[0].name, ddocs[0].views[0].name, 'default',
                                      {"stale": "ok", "full_set": "true"})
        self.assertEqual(results.get(u'rows', []), results_new.get(u'rows', []),
                         "Results returned previously %s don't match with current %s" % (
                             results.get(u'rows', []), results_new.get(u'rows', [])))
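Note: the constructor shapes these examples rely on, as inferred purely from the call sites in this section (a sketch of assumed signatures; the authoritative definitions live in the testrunner library and may differ by version):

# Assumed shapes, inferred from usage in these examples only
# (the import path for View/DesignDocument depends on the testrunner version):
#   View(name, map_func, red_func=None, dev_view=..., is_spatial=...)
#   DesignDocument(name, views, spatial_views=[...], options={...})
view = View("by_age",
            'function (doc) { emit(doc.age, doc.name); }',
            None,            # red_func: no reduce function
            dev_view=False)  # production (non-dev) view
ddoc = DesignDocument("ddoc_example", [view],
                      options={"updateMinChanges": 0,
                               "replicaUpdateMinChanges": 0})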
Example #2
 def test_add_spatial_views_case_sensitive(self):
     ddoc = DesignDocument(self.default_ddoc_name, [],
                           spatial_views=[
                               View(self.default_view_name,
                                    self.default_map,
                                    dev_view=self.use_dev_views,
                                    is_spatial=True),
                               View(self.default_view_name.upper(),
                                    self.default_map,
                                    dev_view=self.use_dev_views,
                                    is_spatial=True)
                           ])
     self.create_ddocs([ddoc])
Example #3
 def make_default_views(self, prefix, count, is_dev_ddoc=False, different_map=False):
     ref_view = self.default_view
     ref_view.name = ref_view.name if prefix is None else prefix
     if different_map:
         views = []
         for i in xrange(count):
             views.append(View(ref_view.name + str(i),
                               'function (doc, meta) {'
                               'emit(meta.id, "emitted_value%s");}' % str(i),
                               None, is_dev_ddoc))
         return views
     else:
         return [View(ref_view.name + str(i), ref_view.map_func, None, is_dev_ddoc) for i in xrange(count)]
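A hedged usage sketch for this helper, modeled on the create flow in Example #28 (the `create_views` argument order is assumed from that example, not verified):

# Hypothetical call site: three production views with distinct map functions,
# pushed into one design doc.
views = self.make_default_views("view", 3, is_dev_ddoc=False, different_map=True)
self.create_views(self.master, "ddoc0", views, "default", self.wait_timeout * 2)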
Example #4
 def test_invalid_map_fn_view(self):
     self._load_doc_data_all_buckets()
     views = [View("view1", 'function (doc) { emit(doc.age, doc.first_name);',
                   red_func=None, dev_view=False),
              View("view1", self.default_map_func,
                   red_func='abc', dev_view=False),
              View("view1", 'function (doc)',
                   red_func=None, dev_view=False)]
     for view in views:
         # assertRaises fails the test on its own if the server accepts
         # the invalid view, so no explicit self.fail is needed inside the block
         with self.assertRaises(DesignDocCreationException):
             self.cluster.create_view(
                 self.master, self.default_design_doc_name, view,
                 'default', self.wait_timeout * 2)
Example #5
 def create_ddocs(self):
     mapview = View(self.map_view_name, '''function(doc) {
          emit(doc.integer, doc.string);
       }''', dev_view=self.is_dev_view)
     self.cluster.create_view(self.master, 'test', mapview)
     redview = View(self.red_view_name, '''function(doc) {
          emit([doc.integer, doc.string], doc.integer);
       }''', '''_count''', dev_view=self.is_dev_view)
     self.cluster.create_view(self.master, 'test', redview)
     redview_stats = View(self.red_view_stats_name, '''function(doc) {
          emit(doc.string, doc.string);
       }''', '''_stats''', dev_view=self.is_dev_view)
     self.cluster.create_view(self.master, 'test2', redview_stats)
     RebalanceHelper.wait_for_persistence(self.master, self.bucket, 0)
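As a follow-up sketch, querying the reduce view created above; the `query_view` argument order is borrowed from Examples #16 and #22 and the "dev_" design-doc prefix handling from Example #16 (all assumed, not verified):

# Hypothetical query against redview, created under ddoc 'test' above;
# dev views are published under a "dev_" design-doc prefix.
prefix = ("", "dev_")[self.is_dev_view]
query = {"stale": "false", "connection_timeout": 60000}
self.cluster.query_view(self.master, prefix + 'test', redview.name, query)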
Example #6
    def test_views_during_ddoc_compaction(self):
        fragmentation_value = self.input.param("fragmentation_value", 80)
        ddoc_to_compact = DesignDocument(
            "ddoc_to_compact", [],
            spatial_views=[
                View(self.default_view_name,
                     'function (doc) { emit(doc.age, doc.name);}',
                     dev_view=self.use_dev_views)
            ])
        ddocs = self.make_ddocs(self.num_ddoc, self.views_per_ddoc, 0)
        self.disable_compaction()
        self.create_ddocs([ddoc_to_compact])
        fragmentation_monitor = self.cluster.async_monitor_view_fragmentation(
            self.master, ddoc_to_compact.name, fragmentation_value,
            self.default_bucket_name)
        end_time = time.time() + self.wait_timeout * 30
        while fragmentation_monitor.state != "FINISHED" and end_time > time.time():
            self.helper.insert_docs(self.num_items,
                                    'spatial-doc',
                                    wait_for_persistence=True)

        if end_time < time.time() and fragmentation_monitor.state != "FINISHED":
            self.fail("impossible to reach compaction value after %s sec" %
                      (self.wait_timeout * 30))
        fragmentation_monitor.result()
        compaction_task = self.cluster.async_compact_view(
            self.master, ddoc_to_compact.name, self.default_bucket_name)
        self.perform_ddoc_ops(ddocs)
        result = compaction_task.result(self.wait_timeout * 10)
        self.assertTrue(
            result, "Compaction didn't finish correctly. Please check diags")
Example #7
 def test_add_spatial_views_threads(self):
     same_names = self.input.param('same-name', False)
     num_ddocs = 10
     create_threads = []
     ddocs = []
     for i in xrange(num_ddocs):
         ddoc = DesignDocument(self.default_ddoc_name + str(i), [],
                               spatial_views=[
                                   View(self.default_view_name +
                                        (str(i), "")[same_names],
                                        self.default_map,
                                        dev_view=self.use_dev_views,
                                        is_spatial=True)
                               ])
         ddocs.append(ddoc)
     if self.ddoc_op == 'update' or self.ddoc_op == 'delete':
         self.create_ddocs(ddocs)
     for i, ddoc in enumerate(ddocs):
         create_thread = Thread(target=self.perform_ddoc_ops,
                                name="ops_thread" + str(i),
                                args=([ddoc],))
         create_threads.append(create_thread)
         create_thread.start()
     for create_thread in create_threads:
         create_thread.join()
     if self.thread_crashed.is_set():
         self.fail("Error occured during run")
Example #8
 def test_add_views_to_1_ddoc(self):
     same_names = self.input.param('same-name', False)
     error = self.input.param('error', None)
     num_views_per_ddoc = 10
     create_threads = []
     try:
         for i in xrange(num_views_per_ddoc):
             ddoc = DesignDocument(self.default_ddoc_name, [],
                                   spatial_views=[
                                       View(self.default_view_name +
                                            (str(i), "")[same_names],
                                            self.default_map,
                                            dev_view=self.use_dev_views,
                                            is_spatial=True)
                                   ])
             create_thread = Thread(target=self.create_ddocs,
                                    name="create_thread" + str(i),
                                    args=([ddoc],))
             create_threads.append(create_thread)
             create_thread.start()
         for create_thread in create_threads:
             create_thread.join()
     except Exception as ex:
         if error and str(ex).find(error) != -1:
             self.log.info("Error caught as expected %s" % error)
             return
         else:
             self.fail("Unexpected error appeared during run %s" % ex)
     if error:
         self.fail("Expected error '%s' didn't appear" % error)
Example #9
    def pass_encrypted_in_logs_test(self):
        self.bucket_size = self._get_bucket_size(self.quota, 1)
        self._create_sasl_buckets(self.master, 1, password='******')
        bucket = self.buckets[-1]

        if self.input.param("load", 0):
            self.num_items = self.input.param("load", 0)
            self._load_doc_data_all_buckets()
        if self.input.param("views", 0):
            views = []
            for i in xrange(self.input.param("views", 0)):
                views.append(
                    View(
                        "view_sasl" + str(i), 'function (doc, meta) {'
                        'emit(meta.id, "emitted_value%s");}' % str(i), None,
                        False))
            self.create_views(self.master, "ddoc", views, bucket)
        if self.input.param("rebalance", 0):
            self.cluster.rebalance(
                self.servers[:self.nodes_init],
                self.servers[self.nodes_init:self.nodes_init +
                             self.input.param("rebalance", 0)], [])

        for server in self.servers[:self.nodes_init]:
            for log_file in ['debug', 'info', 'views', 'xdcr']:
                self.assertFalse(
                    RemoteUtilHelper.is_text_present_in_logs(
                        server, bucket.saslPassword, logs_to_check=log_file),
                    "%s logs contains password in plain text" % log_file)
Example #10
def perform_view_tasks(viewMsgList):
    rest = create_rest()

    if isinstance(viewMsgList, dict):
        viewMsgList = [viewMsgList]

    for viewMsg in viewMsgList:
        if "create" in viewMsg:
            ddocMsg = parseDdocMsg(viewMsg['create'])
            for ddoc_name, views in ddocMsg.iteritems():
                view_list = []
                bucket_name = ''
                for view in views:
                    view_list.append(View(view['view_name'], view['map_func'], view['red_func'],
                                          view['dev_view'], view['is_spatial']))
                    bucket_name = view['bucket_name']

                bucket_obj = rest.get_bucket(bucket_name, 2, 2)
                rest.create_ddoc(ddoc_name, bucket_obj, view_list)

        if "delete" in viewMsg:
            for view in viewMsg['delete']:
                viewMsgParsed = parseViewMsg(view)
                bucket_obj = rest.get_bucket(viewMsgParsed['bucket_name'], 2, 2)
                rest.delete_view(bucket_obj, viewMsgParsed['ddoc_name'])
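For reference, a hedged sketch of the structure `parseDdocMsg` appears to return, inferred solely from the keys accessed above (the field names are assumptions read off the loop, not a documented schema):

# One design-doc name mapping to a list of view descriptors:
parsed_ddoc_msg = {
    "ddoc1": [
        {"view_name": "v0",
         "map_func": 'function (doc) { emit(doc.age, doc.name); }',
         "red_func": None,
         "dev_view": False,
         "is_spatial": False,
         "bucket_name": "default"},
    ],
}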
Example #11
    def setUp(self):
        super(RebalanceBaseTest, self).setUp()
        self.value_size = self.input.param("value_size", 256)
        self.doc_ops = self.input.param("doc_ops", None)
        if self.doc_ops is not None:
            self.doc_ops = self.doc_ops.split(";")
        self.default_map_func = "function (doc) {\n  emit(doc._id, doc);\n}"
        self.default_view_name = "default_view"
        self.default_view = View(self.default_view_name, self.default_map_func,
                                 None)

        # define the data that will be used in the test
        self.blob_generator = self.input.param("blob_generator", True)
        if self.blob_generator:
            # gen_load is used to upload data before each test (1000 items by default)
            self.gen_load = BlobGenerator('mike',
                                          'mike-',
                                          self.value_size,
                                          end=self.num_items)
            # gen_update is used to mutate the first half of the uploaded data
            self.gen_update = BlobGenerator('mike',
                                            'mike-',
                                            self.value_size,
                                            end=(self.num_items / 2 - 1))
            # upload data before each test
            self._load_all_buckets(self.servers[0], self.gen_load, "create", 0)
        else:
            self._load_doc_data_all_buckets()
Example #12
 def make_default_views(self, prefix, count, is_dev_ddoc=False):
     ref_view = self.default_view
     ref_view.name = ref_view.name if prefix is None else prefix
     return [
         View(ref_view.name + str(i), ref_view.map_func, None, is_dev_ddoc)
         for i in xrange(count)
     ]
Example #13
    def test_add_single_spatial_view(self):
        name_length = self.input.param('name_length', None)
        view_name = self.input.param('view_name', self.default_view_name)
        if name_length:
            view_name = ''.join(
                random.choice(string.lowercase) for x in xrange(name_length))
        not_compilable = self.input.param('not_compilable', False)
        error = self.input.param('error', None)
        # the second variant is deliberately left unterminated so it fails to compile
        map_fn = ('function (doc) {emit(doc.geometry, doc.age);'
                  if not_compilable else self.default_map)

        ddoc = DesignDocument(self.default_ddoc_name, [],
                              spatial_views=[
                                  View(view_name,
                                       map_fn,
                                       dev_view=self.use_dev_views,
                                       is_spatial=True)
                              ])
        try:
            self.create_ddocs([ddoc])
        except Exception as ex:
            if error and str(ex).find(error) != -1:
                self.log.info("Error caught as expected %s" % error)
                return
            else:
                self.fail("Unexpected error appeared during run %s" % ex)
        if error:
            self.fail("Expected error '%s' didn't appear" % error)
Example #14
 def _create_view_doc_name(self, prefix, bucket='default'):
     self.log.info("description : create a view")
     master = self.servers[0]
     rest = RestConnection(master)
     view_name = "dev_test_view-{0}".format(prefix)
     map_fn = "function (doc) {if(doc.name.indexOf(\"" + prefix + "-\") != -1) { emit(doc.name, doc);}}"
     rest.create_view(view_name, bucket, [View(view_name, map_fn, dev_view=False)])
     self.created_views[view_name] = bucket
     return view_name
Example #15
 def create_default_views(self, is_one_ddoc=False):
     views = [
         View(self.testcase.default_view_name + "0",
              'function (doc) {emit(doc.geometry, doc.age);}',
              dev_view=self.testcase.use_dev_views,
              is_spatial=True),
         View(self.testcase.default_view_name + "1",
              'function (doc) {emit(doc.geometry, null);}',
              dev_view=self.testcase.use_dev_views,
              is_spatial=True),
         View(self.testcase.default_view_name + "2",
              'function (doc) {emit(doc.geometry, doc.name);}',
              dev_view=self.testcase.use_dev_views,
              is_spatial=True),
         View(self.testcase.default_view_name + "3",
              'function (doc) {emit(doc.geometry, [doc.name, doc.age]);}',
              dev_view=self.testcase.use_dev_views,
              is_spatial=True),
         View(
             self.testcase.default_view_name + "4",
             'function (doc) {emit(doc.geometry, {result : {age:doc.age}});}',
             dev_view=self.testcase.use_dev_views,
             is_spatial=True)
     ]
     ddocs = []
     if is_one_ddoc:
         ddocs.append(
             DesignDocument(self.testcase.default_ddoc_name, [],
                            spatial_views=views))
     else:
         for i in xrange(5):
             ddocs.append(
                 DesignDocument(self.testcase.default_ddoc_name + str(i),
                                [],
                                spatial_views=[views[i]]))
     for ddoc in ddocs:
         for view in ddoc.spatial_views:
             self.testcase.cluster.create_view(
                 self.testcase.master,
                 ddoc.name,
                 view,
                 bucket=self.testcase.bucket_name)
     return ddocs
Example #16
    def rebalance_in_with_ddoc_compaction(self):
        fragmentation_value = self.input.param("fragmentation_value", 80)
        is_dev_ddoc = False
        ddoc_name = "ddoc_compaction"
        map_fn_2 = "function (doc) { if (doc.first_name == 'sharon') {emit(doc.age, doc.first_name);}}"

        ddoc = DesignDocument(ddoc_name, [View(ddoc_name + "0", self.default_map_func,
                                               None,
                                               dev_view=is_dev_ddoc),
                                          View(ddoc_name + "1",
                                               map_fn_2, None,
                                               dev_view=is_dev_ddoc)])
        prefix = ("", "dev_")[is_dev_ddoc]
        query = {"connectionTimeout" : 60000}
        self.disable_compaction()

        for view in ddoc.views:
            self.cluster.create_view(self.master, ddoc.name, view, bucket=self.default_bucket_name)

        generator = self._load_doc_data_all_buckets()
        RebalanceHelper.wait_for_persistence(self.master, self.default_bucket_name)

        rebalance = self.cluster.async_rebalance([self.master], self.servers[1:self.nodes_in + 1], [])
        # while the rebalance runs, generate load until the fragmentation value is reached
        while rebalance.state != "FINISHED":
            fragmentation_monitor = self.cluster.async_monitor_view_fragmentation(self.master,
                             prefix + ddoc_name, fragmentation_value, self.default_bucket_name)
            end_time = time.time() + self.wait_timeout * 30
            while fragmentation_monitor.state != "FINISHED" and end_time > time.time():
                # update docs to create fragmentation
                self._load_doc_data_all_buckets("update", gen_load=generator)
                for view in ddoc.views:
                    # run queries to create indexes
                    self.cluster.query_view(self.master, prefix + ddoc_name, view.name, query)
            if end_time < time.time() and fragmentation_monitor.state != "FINISHED":
                self.fail("impossible to reach compaction value after %s sec" % (self.wait_timeout * 30))
            fragmentation_monitor.result()
            compaction_task = self.cluster.async_compact_view(self.master, prefix + ddoc_name,
                                                              self.default_bucket_name, with_rebalance=True)
            result = compaction_task.result(self.wait_timeout * 10)
        self.assertTrue(result, "Compaction didn't finish correctly. Please check diags")
        rebalance.result()
Example #17
 def test_create_view_multi_map_fun(self):
     self._load_doc_data_all_buckets()
     get_compile = self.input.param("get_compile", True)
     map_fun = self._get_complex_map(get_compile)
     view = View("View1", map_fun, None, False)
     self.cluster.create_view(self.master, self.default_design_doc_name,
                              view, 'default', self.wait_timeout * 2)
     self.view_list.append(view.name)
     self.ddoc_view_map[self.default_design_doc_name] = self.view_list
     self.bucket_ddoc_map['default'] = self.ddoc_view_map
     self._verify_ddoc_ops_all_buckets()
     self._verify_ddoc_data_all_buckets()
Example #18
    def _test_view_on_multiple_docs(self, num_docs, params=None, delay=10):
        if params is None:
            params = {"stale": "update_after"}
        self.log.info("description : create a view on {0} documents".format(num_docs))
        master = self.servers[0]
        rest = RestConnection(master)
        bucket = "default"
        view_name = "dev_test_view_on_{1}_docs-{0}".format(str(uuid.uuid4())[:7], self.num_docs)
        map_fn = "function (doc) {if(doc.name.indexOf(\"" + view_name + "\") != -1) { emit(doc.name, doc);}}"
        rest.create_view(view_name, bucket, [View(view_name, map_fn, dev_view=False)])
        self.created_views[view_name] = bucket
        rest = RestConnection(self.servers[0])
        smart = VBucketAwareMemcached(rest, bucket)
        doc_names = []
        prefix = str(uuid.uuid4())[:7]
        total_time = 0
        self.log.info("inserting {0} json objects".format(num_docs))
        for i in range(0, num_docs):
            key = doc_name = "{0}-{1}-{2}".format(view_name, prefix, i)
            doc_names.append(doc_name)
            value = {"name": doc_name, "age": 1000}
            smart.set(key, 0, 0, json.dumps(value))
        self.log.info("inserted {0} json documents".format(len(doc_names)))
        time.sleep(10)
        results = ViewBaseTests._get_view_results(self, rest, bucket, view_name, len(doc_names), extra_params=params)
        view_time = results['view_time']

        keys = ViewBaseTests._get_keys(self, results)

        RebalanceHelper.wait_for_persistence(master, bucket, 0)

        total_time = view_time
        # keep retrying for up to 900 seconds (timeout increased for windows testing)
        start_time = time.time()
        while (len(keys) != len(doc_names)) and (time.time() - start_time < 900):
            msg = "view returned {0} items , expected to return {1} items"
            self.log.info(msg.format(len(keys), len(doc_names)))
            self.log.info("trying again in {0} seconds".format(delay))
            time.sleep(delay)
            results = ViewBaseTests._get_view_results(self, rest, bucket, view_name, len(doc_names), extra_params=params)
            view_time = results['view_time']
            total_time += view_time
            keys = ViewBaseTests._get_keys(self, results)

        self.log.info("View time: {0} secs".format(total_time))

        # Only if the lengths are not equal, look for missing keys
        if len(keys) != len(doc_names):
            not_found = list(set(doc_names) - set(keys))
            ViewBaseTests._print_keys_not_found(self, not_found, 10)
            self.fail("map function did not return docs for {0} keys".format(len(not_found)))
Example #19
 def make_ddocs(self, ddocs_num, views_per_ddoc,
                non_spatial_views_per_ddoc):
     ddocs = []
     for i in xrange(ddocs_num):
         views = []
         for k in xrange(views_per_ddoc):
             views.append(
                 View(self.default_view_name + str(k),
                      self.default_map,
                      dev_view=self.use_dev_views,
                      is_spatial=True))
         non_spatial_views = []
         if non_spatial_views_per_ddoc:
             for k in xrange(non_spatial_views_per_ddoc):
                 non_spatial_views.append(
                     View(self.default_view_name + str(k),
                          'function (doc) { emit(null, doc);}',
                          dev_view=self.use_dev_views))
         ddocs.append(
             DesignDocument(self.default_ddoc_name + str(i),
                            non_spatial_views,
                            spatial_views=views))
     return ddocs
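Usage mirrored from Example #6, which feeds this helper's output into `create_ddocs` (the argument values here are illustrative, not from the source):

# Two ddocs, each with three spatial views and no non-spatial views:
ddocs = self.make_ddocs(2, 3, 0)
self.create_ddocs(ddocs)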
Example #20
 def test_invalid_view(self):
     self._load_doc_data_all_buckets()
     invalid_view_name_list = ["", " leadingspace", "\nleadingnewline",
                               "\rleadingcarriagereturn", "\tleadingtab",
                               "trailingspace ", "trailingnewline\n",
                               "trailingcarriagereturn\r", "trailingtab\t"]
     for view_name in invalid_view_name_list:
         view = View(view_name, self.default_map_func, None)
         # assertRaises fails the test on its own if the server accepts
         # the invalid view name, so no explicit self.fail is needed
         with self.assertRaises(DesignDocCreationException):
             self.cluster.create_view(
                 self.master, self.default_design_doc_name, view,
                 'default', self.wait_timeout * 2)
Example #21
 def setUp(self):
     super(CreateDeleteViewTests, self).setUp()
     self.bucket_ddoc_map = {}
     self.ddoc_ops = self.input.param("ddoc_ops", None)
     self.nodes_in = self.input.param("nodes_in", 1)
     self.nodes_out = self.input.param("nodes_out", 1)
     self.test_with_view = self.input.param("test_with_view", False)
     self.num_views_per_ddoc = self.input.param("num_views_per_ddoc", 1)
     self.num_ddocs = self.input.param("num_ddocs", 1)
     self.gen = None
     self.default_design_doc_name = "Doc1"
     self.default_map_func = 'function (doc) { emit(doc.age, doc.first_name);}'
     self.updated_map_func = 'function (doc) { emit(null, doc);}'
     self.default_view = View("View", self.default_map_func, None, False)
     self.fragmentation_value = self.input.param("fragmentation_value", 80)
Example #22
    def _verify_with_views(self, expected_rows):

        for bucket in self.buckets:
            default_map_func = 'function (doc, meta) { emit(meta.id, null);}'
            default_view = View("View", default_map_func, None, False)
            ddoc_name = "key_ddoc"

            self.create_views(self.master, ddoc_name, [default_view],
                              bucket.name)
            query = {"stale": "false", "connection_timeout": 60000}
            self.cluster.query_view(self.master,
                                    ddoc_name,
                                    default_view.name,
                                    query,
                                    expected_rows,
                                    bucket=bucket.name)
Example #23
 def setUp(self):
     super(ObserveTests, self).setUp()
     # self.pre_warmup_stats = {}
     self.node_servers = []
     self.timeout = 120
     self.nodes_in = int(self.input.param("nodes_in", 1))
     self.observe_with = self.input.param("observe_with", "")
     self.default_map_func = 'function (doc) { emit(doc.age, doc.first_name);}'
     self.default_design_doc = "Doc1"
     map_func = 'function (doc) { emit(null, doc);}'
     self.default_view = View("default_view", map_func, None)
     self.access_log = self.input.param("access_log", False)
     self.servs_in = [self.servers[i + 1] for i in range(self.nodes_in)]
     self.mutate_by = self.input.param("mutate_by", "set")
     self.log.info("Observe Rebalance Started")
     self.cluster.rebalance(self.servers[:1], self.servs_in, [])
     self.nodes_init = self.input.param("nodes_init", 2)
     self.without_access_log = self.input.param("without_access_log", False)
Example #24
    def _big_int_test_setup(self, num_items):

        timestamp = [13403751757202, 13403751757402, 13403751757302]
        docId = ['0830c075-2a81-448a-80d6-85214ee3ad64', '0830c075-2a81-448a-80d6-85214ee3ad65', '0830c075-2a81-448a-80d6-85214ee3ad66']
        conversationId = [929342299234203]
        msg = ['msg1', 'msg2']
        template = '{{ "docId": "{0}", "conversationId": {1}, "timestamp": {2}, "msg": "{3}" }}'

        gen_load = DocumentGenerator('test_docs', template, docId, conversationId, timestamp, msg, start=0, end=num_items)

        self.log.info("Inserting json data into bucket")
        self._load_all_buckets(self.master, gen_load, "create", 0)
        self._wait_for_stats_all_buckets([self.master])

        map_fn = 'function (doc) {emit([doc.conversationId, doc.timestamp], doc);}'
        view = [View('view_big_int', map_fn, dev_view=False)]

        self.create_views(self.master, 'ddoc_big_int', view)
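A hedged follow-up sketch: a range query over the composite [conversationId, timestamp] key emitted above, exercising the large integers this setup loads (the query helper and parameter names are assumed from Examples #16 and #22, not verified):

# startkey/endkey bracket the timestamps inserted by _big_int_test_setup;
# the point of the test is that these big integers survive indexing intact.
query = {"startkey": "[929342299234203, 13403751757202]",
         "endkey": "[929342299234203, 13403751757402]",
         "stale": "false", "connection_timeout": 60000}
self.cluster.query_view(self.master, 'ddoc_big_int', 'view_big_int', query)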
Example #25
    def create_user_test_ddoc_check(self):
        rest = RestConnection(self.master)
        ddoc = DesignDocument("ddoc_ro_0", [View("ro_view",
                            "function (doc) {\n  emit(doc._id, doc);\n}",
                            dev_view=False)])
        rest.create_design_document(self.buckets[0], ddoc)

        rest.create_ro_user(username=self.username, password=self.password)
        self.master.rest_username = self.username
        self.master.rest_password = self.password
        rest = RestConnection(self.master)

        self.log.info("Try to delete ddoc")
        self.buckets[0].authType = ""
        try:
            rest.delete_view(self.buckets[0], ddoc.views[0])
        except Exception as ex:
            self.log.info("Unable to delete ddoc, as expected: %s" % ex)
            self.buckets[0].authType = "sasl"
Example #26
    def measure_time_index_during_rebalance(self):
        num_ddocs = self.input.param("num_ddocs", 1)
        num_views = self.input.param("num_views", 1)
        is_dev_ddoc = self.input.param("is_dev_ddoc", False)
        ddoc_names = ['ddoc' + str(i) for i in xrange(num_ddocs)]
        map_func = """function (doc, meta) {{\n  emit(meta.id, "emitted_value_{0}");\n}}"""
        views = [
            View("view" + str(i), map_func.format(i), None, is_dev_ddoc, False)
            for i in xrange(num_views)
        ]
        #views = self.make_default_views(self.default_view_name, num_views, is_dev_ddoc)

        prefix = ("", "dev_")[is_dev_ddoc]
        query = {"connectionTimeout": 60000}
        if not is_dev_ddoc:
            query["full_set"] = "true"
        tasks = []

        for bucket in self.buckets:
            for ddoc_name in ddoc_names:
                tasks += self.async_create_views(self.master, ddoc_name, views,
                                                 bucket)
        for task in tasks:
            task.result(self.wait_timeout * 5)
        for ddoc_name in ddoc_names:
            for view in views:
                # run queries to build the indexes for every ddoc
                self.cluster.query_view(self.master, prefix + ddoc_name,
                                        view.name, query)
        now = time.time()
        self.sleep(5)
        servs_init = self.servers[:self.nodes_init]
        servs_in = [
            self.servers[i + self.nodes_init] for i in range(self.nodes_in)
        ]
        servs_out = [
            self.servers[self.nodes_init - i - 1]
            for i in range(self.nodes_out)
        ]
        for i in xrange(num_ddocs * num_views * len(self.buckets)):
            #wait until all initial_build indexer processes are completed
            active_tasks = self.cluster.async_monitor_active_task(
                servs_init, "indexer", "True", wait_task=False)
            for active_task in active_tasks:
                result = active_task.result()
                self.assertTrue(result)
        self.log.info(
            "PERF: indexing time for {0} ddocs with {1} views:{2}".format(
                num_ddocs, num_views,
                time.time() - now))

        rest = RestConnection(self.master)
        #self._wait_for_stats_all_buckets(servs_init)
        self.log.info("current nodes : {0}".format(
            [node.id for node in rest.node_statuses()]))
        self.log.info("adding nodes {0} to cluster".format(servs_in))
        self.log.info("removing nodes {0} from cluster".format(servs_out))
        result_nodes = set(servs_init + servs_in) - set(servs_out)

        data_perc_add = self.input.param("data_perc_add", 10)
        gen_create = BlobGenerator('mike',
                                   'mike-',
                                   self.value_size,
                                   start=self.num_items + 1,
                                   end=self.num_items * (100 + data_perc_add) /
                                   100)
        load_tasks = self._async_load_all_buckets(self.master, gen_create,
                                                  "create", 0)
        rebalance = self.cluster.async_rebalance(servs_init, servs_in,
                                                 servs_out)

        expected_rows = self.num_items * (100 + data_perc_add) / 100 - 1
        start_time = time.time()

        tasks = {}
        for bucket in self.buckets:
            for ddoc_name in ddoc_names:
                for i in xrange(num_views):
                    tasks["{0}/_design/{1}/_view/{2}".format(bucket, ddoc_name, "view" + str(i))] = \
                        self.cluster.async_query_view(self.master, \
                                        prefix + ddoc_name, "view" + str(i), query, expected_rows, bucket)
        while tasks and time.time() - start_time < self.wait_timeout * 30:
            completed_tasks = []
            for task in tasks:
                if tasks[task].done:
                    if tasks[task].result():
                        self.log.info("expected query result with view {0} was obtained in {1} seconds".\
                                 format(task, time.time() - start_time))
                        completed_tasks += [task]
            for completed_task in completed_tasks:
                del tasks[completed_task]

        if len(tasks) > 0:
            for task in tasks:
                tasks[task].result(self.wait_timeout)

        load_tasks[0].result()
        rebalance.result()

        self.verify_cluster_stats(result_nodes)
Example #27
    def _init_parameters(self):
        self._log.info("Initializing input parameters started...")
        # clusters is declared as a dict in TestInput, which is unordered;
        # clusters are populated in the dict in testrunner such that the ordinal is the key.
        # OrderedDict cannot be used, to maintain compatibility with python 2.6.
        self._clusters_dic = self._input.clusters
        self._clusters_keys_olst = range(len(self._clusters_dic))
        self._cluster_counter_temp_int = 0
        self._cluster_names_dic = self._get_cluster_names()
        self._servers = self._input.servers
        self._disabled_consistent_view = self._input.param(
            "disabled_consistent_view", True)
        # these are the servers defined in the .ini file but not linked to any cluster
        self._floating_servers_set = self._get_floating_servers()
        self._cluster_counter_temp_int = 0  # TODO: fix the testrunner code to pass cluster name in params
        self._buckets = []

        self._default_bucket = self._input.param("default_bucket", True)
        """
        ENTER: sasl_buckets=[no.] or standard_buckets=[no.]
        """
        self._standard_buckets = self._input.param("standard_buckets", 0)
        self._sasl_buckets = self._input.param("sasl_buckets", 0)

        if self._default_bucket:
            self.default_bucket_name = "default"

        self._num_replicas = self._input.param("replicas", 1)
        self._num_items = self._input.param("items", 1000)
        self._value_size = self._input.param("value_size", 256)
        self._dgm_run_bool = self._input.param("dgm_run", False)
        self._mem_quota_int = 0  # will be set in subsequent methods

        self._poll_interval = self._input.param(
            XDCRConstants.INPUT_PARAM_POLL_INTERVAL, 5)
        self._poll_timeout = self._input.param(
            XDCRConstants.INPUT_PARAM_POLL_TIMEOUT, 120)

        self.init_parameters_extended()

        self._doc_ops = self._input.param("doc-ops", None)
        if self._doc_ops is not None:
            self._doc_ops = self._doc_ops.split("-")
        self._doc_ops_dest = self._input.param("doc-ops-dest", None)
        # semi-colon separator is not accepted for some reason here
        if self._doc_ops_dest is not None:
            self._doc_ops_dest = self._doc_ops_dest.split("-")

        self._case_number = self._input.param("case_number", 0)
        self._expires = self._input.param("expires", 0)
        self._timeout = self._input.param("timeout", 60)
        self._percent_update = self._input.param("upd", 30)
        self._percent_delete = self._input.param("del", 30)
        self._warmup = self._input.param("warm", None)
        self._failover = self._input.param("failover", None)
        self._rebalance = self._input.param("rebalance", None)
        if self._warmup is not None:
            self._warmup = self._warmup.split("-")
        if self._failover is not None:
            self._failover = self._failover.split("-")
        if self._rebalance is not None:
            self._rebalance = self._rebalance.split("-")
            self._num_rebalance = self._input.param("num_rebalance", 1)
        """
        CREATE's a set of items,
        UPDATE's UPD% of the items starting from 0,
        DELETE's DEL% of the items starting from the end (count(items)).
        """
        self.gen_create = BlobGenerator('loadOne',
                                        'loadOne',
                                        self._value_size,
                                        end=self._num_items)
        self.gen_delete = BlobGenerator(
            'loadOne',
            'loadOne-',
            self._value_size,
            start=int(
                (self._num_items) * (float)(100 - self._percent_delete) / 100),
            end=self._num_items)
        self.gen_update = BlobGenerator(
            'loadOne',
            'loadOne-',
            self._value_size,
            start=0,
            end=int(self._num_items * (float)(self._percent_update) / 100))

        self.ord_keys = self._clusters_keys_olst
        self.ord_keys_len = len(self.ord_keys)

        self.src_nodes = self._clusters_dic[0]
        self.src_master = self.src_nodes[0]

        self.dest_nodes = self._clusters_dic[1]
        self.dest_master = self.dest_nodes[0]

        self._default_map_func = "function (doc) {\n  emit(doc._id, doc);\n}"
        self._default_view_name = "default_view"
        self._default_view = View(self._default_view_name,
                                  self._default_map_func, None)
        self._num_views = self._input.param("num_views", 5)
        self._is_dev_ddoc = self._input.param("is_dev_ddoc", True)

        self.fragmentation_value = self._input.param("fragmentation_value", 80)
        self.disable_src_comp = self._input.param("disable_src_comp", True)
        self.disable_dest_comp = self._input.param("disable_dest_comp", True)

        self._log.info("Initializing input parameters completed.")
Example #28
 def _execute_ddoc_ops(self,
                       ddoc_op_type,
                       test_with_view,
                       num_ddocs,
                       num_views_per_ddoc,
                       prefix_ddoc="dev_ddoc",
                       prefix_view="views",
                       start_pos_for_mutation=0,
                       bucket="default"):
     if ddoc_op_type == "create":
         self.log.info(
             "Processing Create DDoc Operation On Bucket {0}".format(
                 bucket))
         # if there are ddocs already, add to them; otherwise start with an empty map
         ddoc_view_map = self.bucket_ddoc_map.pop(bucket, {})
         for ddoc_count in xrange(num_ddocs):
             design_doc_name = prefix_ddoc + str(ddoc_count)
             view_list = []
             #Add views if flag is true
             if test_with_view:
                 #create view objects as per num_views_per_ddoc
                 view_list = self.make_default_views(
                     prefix_view, num_views_per_ddoc)
             #create view in the database
             self.create_views(self.master, design_doc_name, view_list,
                               bucket, self.wait_timeout * 2)
             #store the created views in internal dictionary
             ddoc_view_map[design_doc_name] = view_list
         #store the ddoc-view dict per bucket
         self.bucket_ddoc_map[bucket] = ddoc_view_map
     elif ddoc_op_type == "update":
         self.log.info(
             "Processing Update DDoc Operation On Bucket {0}".format(
                 bucket))
         #get the map dict for the bucket
         ddoc_view_map = self.bucket_ddoc_map[bucket]
         ddoc_map_loop_cnt = 0
         #iterate for all the ddocs
         for ddoc_name, view_list in ddoc_view_map.items():
             if ddoc_map_loop_cnt < num_ddocs:
                 #Update views if flag is true
                 if test_with_view:
                     #iterate and update all the views as per num_views_per_ddoc
                     for view_count in xrange(num_views_per_ddoc):
                         #create new View object to be updated
                         updated_view = View(
                             view_list[start_pos_for_mutation +
                                       view_count].name,
                             self.updated_map_func, None, False)
                         self.cluster.create_view(self.master, ddoc_name,
                                                  updated_view, bucket,
                                                  self.wait_timeout * 2)
                 else:
                     #update the existing design doc(rev gets updated with this call)
                     self.cluster.create_view(self.master, ddoc_name, None,
                                              bucket, self.wait_timeout * 2)
                 ddoc_map_loop_cnt += 1
     elif ddoc_op_type == "delete":
         self.log.info(
             "Processing Delete DDoc Operation On Bucket {0}".format(
                 bucket))
         #get the map dict for the bucket
         ddoc_view_map = self.bucket_ddoc_map[bucket]
         ddoc_map_loop_cnt = 0
         #iterate for all the ddocs
         for ddoc_name, view_list in ddoc_view_map.items():
             if ddoc_map_loop_cnt < num_ddocs:
                 # Delete views if flag is true
                 if test_with_view:
                     for view_count in xrange(num_views_per_ddoc):
                         # iterate and delete the views as per num_views_per_ddoc
                         self.cluster.delete_view(
                             self.master, ddoc_name,
                             view_list[start_pos_for_mutation + view_count],
                             bucket, self.wait_timeout * 2)
                     # store the updated view list
                     ddoc_view_map[ddoc_name] = (
                         view_list[:start_pos_for_mutation] +
                         view_list[start_pos_for_mutation + num_views_per_ddoc:])
                 else:
                     #delete the design doc
                     self.cluster.delete_view(self.master, ddoc_name, None,
                                              bucket, self.wait_timeout * 2)
                     #remove the ddoc_view_map
                     del ddoc_view_map[ddoc_name]
                 ddoc_map_loop_cnt += 1
         #store the updated ddoc dict
         self.bucket_ddoc_map[bucket] = ddoc_view_map
     else:
         self.fail(
             "Invalid ddoc operation {0}. No execution done.".format(
                 ddoc_op_type))
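A hedged sketch of how this dispatcher might be driven (the values are illustrative; the flow follows the create/update/delete branches above):

# Create 2 ddocs with 3 views each, update the first view of each ddoc,
# then delete the view at position 1 in each ddoc.
self._execute_ddoc_ops("create", True, 2, 3, bucket="default")
self._execute_ddoc_ops("update", True, 2, 1, bucket="default")
self._execute_ddoc_ops("delete", True, 2, 1, start_pos_for_mutation=1,
                       bucket="default")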
Example #29
    def setUp(self):
        super(ViewBaseTest, self).setUp()

        map_func = 'function (doc) { emit(null, doc);}'
        self.default_view = View("default_view", map_func, None)