def test_create_delete_similar_views(self):
    """Create a prod and a dev ddoc with same-named views, delete the dev view,
    then verify the prod view's query results are unaffected.

    Bug fix: ``Cluster()`` was constructed inside the ``try`` block, so a
    failing constructor caused a ``NameError`` on ``cluster.shutdown()`` in
    ``finally``. The constructor is now hoisted before the ``try``.
    """
    ddoc_name_prefix = self.input.param("ddoc_name_prefix", "ddoc")
    view_name = self.input.param("view_name", "test_view")
    map_fn = 'function (doc) {if(doc.age !== undefined) { emit(doc.age, doc.name);}}'
    rest = RestConnection(self.servers[0])
    ddocs = [DesignDocument(ddoc_name_prefix + "1", [View(view_name, map_fn,
                                                          dev_view=False)],
                            options={"updateMinChanges": 0, "replicaUpdateMinChanges": 0}),
             DesignDocument(ddoc_name_prefix + "2", [View(view_name, map_fn,
                                                          dev_view=True)],
                            options={"updateMinChanges": 0, "replicaUpdateMinChanges": 0})]
    ViewBaseTests._load_docs(self, self.num_docs, "test_")
    for ddoc in ddocs:
        # NOTE: ``results`` deliberately ends up holding the value from the
        # last created ddoc, matching the original comparison below.
        results = self.create_ddoc(rest, 'default', ddoc)
    # Construct outside the try so ``finally`` never sees an unbound name.
    cluster = Cluster()
    try:
        cluster.delete_view(self.servers[0], ddocs[1].name, ddocs[1].views[0])
    finally:
        cluster.shutdown()
    results_new = rest.query_view(ddocs[0].name, ddocs[0].views[0].name, 'default',
                                  {"stale": "ok", "full_set": "true"})
    self.assertEquals(results.get(u'rows', []), results_new.get(u'rows', []),
                      "Results returned previosly %s don't match with current %s" % (
                          results.get(u'rows', []), results_new.get(u'rows', [])))
def _verify_ddoc_ops_all_buckets(self):
    """Validate that every view tracked in self.bucket_ddoc_map is still
    present in its design document on the server; fail on the first view or
    design document that is missing."""
    self.log.info("DDoc Validation Started")
    rest = RestConnection(self.master)
    # Walk each bucket's ddoc->views map kept in the internal dictionary.
    for bucket, self.ddoc_view_map in self.bucket_ddoc_map.items():
        for ddoc_name, view_list in self.ddoc_view_map.items():
            try:
                # Pull the design document back from the database.
                ddoc_json, header = rest.get_ddoc(bucket, ddoc_name)
                self.log.info("Database Document {0} details : {1}".format(ddoc_name,
                                                                           json.dumps(ddoc_json)))
                stored = DesignDocument._init_from_json(ddoc_name, ddoc_json)
                stored_names = set(v.name for v in stored.views)
                for view in view_list:
                    if view.name not in stored_names:
                        self.fail(
                            "Validation Error: View - {0} in Design Doc - {1} and Bucket - {2} is missing from database".format(
                                view.name, ddoc_name, bucket
                            )
                        )
            except ReadDocumentException:
                self.fail(
                    "Validation Error: Design Document - {0} is missing from Bucket - {1}".format(ddoc_name, bucket)
                )
    self.log.info("DDoc Validation Successful")
def execute(self, task_manager):
    """Delete self.view from the design doc and push the updated doc back.

    Bug fix: when ``delete_view`` reported the view missing, the task set
    FINISHED and recorded the exception but then fell through to
    ``create_design_document``, flipped state back to CHECKING and
    rescheduled itself — clobbering the failure. An early ``return`` now
    stops the task at the failure point.
    """
    rest = RestConnection(self.server)
    try:
        # remove view from existing design doc
        content = rest.get_ddoc(self.bucket, self.design_doc_name)
        ddoc = DesignDocument._init_from_json(self.design_doc_name, content)
        status = ddoc.delete_view(self.view)
        if not status:
            self.state = FINISHED
            self.set_exception(Exception('View does not exist! %s' % (self.view.name)))
            return  # do not re-create the ddoc or reschedule after failing
        # update design doc
        rest.create_design_document(self.bucket, ddoc)
        self.state = CHECKING
        task_manager.schedule(self, 2)
    except (ValueError, ReadDocumentException, DesignDocCreationException) as e:
        self.state = FINISHED
        self.set_exception(e)
    # catch and set all unexpected exceptions
    except Exception as e:
        self.state = FINISHED
        self.log.info("Unexpected Exception Caught")
        self.set_exception(e)
def execute(self, task_manager):
    """Add self.view to the design doc (creating the ddoc if absent) and
    schedule the CHECKING phase.

    The first try/except distinguishes "ddoc exists" (append the view and
    record its revision) from "ddoc missing" (start a fresh ddoc with this
    one view). The second try pushes the result to the server.
    """
    rest = RestConnection(self.server)
    try:
        # appending view to existing design doc
        content = rest.get_ddoc(self.bucket, self.design_doc_name)
        ddoc = DesignDocument._init_from_json(self.design_doc_name, content)
        ddoc.add_view(self.view)
        # NOTE: the revision is captured only on this fetch path — a
        # freshly created ddoc (below) has no '_rev' yet.
        self.ddoc_rev_no = self._parse_revision(content['_rev'])
    except ReadDocumentException:
        # creating first view in design doc
        ddoc = DesignDocument(self.design_doc_name, [self.view])
    try:
        rest.create_design_document(self.bucket, ddoc)
        self.state = CHECKING
        task_manager.schedule(self)
    except DesignDocCreationException as e:
        self.state = FINISHED
        self.set_exception(e)
    # catch and set all unexpected exceptions
    # NOTE(review): this generic handler guards only the create/schedule
    # step; an unexpected error from get_ddoc above would propagate.
    except Exception as e:
        self.state = FINISHED
        self.log.info("Unexpected Exception Caught")
        self.set_exception(e)
def test_add_single_spatial_view(self):
    """Create one spatial view in one design doc, optionally with a randomly
    generated name or a deliberately non-compilable map function, and check
    that an expected error (if any) is raised."""
    name_lenght = self.input.param('name_lenght', None)
    view_name = self.input.param('view_name', self.default_view_name)
    if name_lenght:
        # Replace the view name with a random lowercase string of the
        # requested length.
        view_name = ''.join(random.choice(string.lowercase)
                            for x in xrange(name_lenght))
    not_compilable = self.input.param('not_compilable', False)
    error = self.input.param('error', None)
    if not_compilable:
        # Map function intentionally missing its closing brace.
        map_fn = 'function (doc) {emit(doc.geometry, doc.age);'
    else:
        map_fn = self.default_map
    spatial_view = View(view_name, map_fn, dev_view=self.use_dev_views,
                        is_spatial=True)
    ddoc = DesignDocument(self.default_ddoc_name, [],
                          spatial_views=[spatial_view])
    try:
        self.create_ddocs([ddoc])
    except Exception as ex:
        if error and str(ex).find(error) != -1:
            self.log.info("Error caught as expected %s" % error)
            return
        self.fail("Unexpected error appeared during run %s" % ex)
    if error:
        self.fail("Expected error '%s' didn't appear" % error)
def test_views_during_ddoc_compaction(self):
    """Drive a design doc to a target fragmentation level, then run ddoc
    operations while that design doc is being compacted.

    Bug fix: the timeout failure message reported ``wait_timeout * 20``
    while the actual deadline is ``wait_timeout * 30``; the message now
    matches the deadline.
    """
    fragmentation_value = self.input.param("fragmentation_value", 80)
    ddoc_to_compact = DesignDocument(
        "ddoc_to_compact", [],
        spatial_views=[
            View(self.default_view_name,
                 'function (doc) { emit(doc.age, doc.name);}',
                 dev_view=self.use_dev_views)
        ])
    ddocs = self.make_ddocs(self.num_ddoc, self.views_per_ddoc, 0)
    self.disable_compaction()
    self.create_ddocs([ddoc_to_compact, ])
    fragmentation_monitor = self.cluster.async_monitor_view_fragmentation(
        self.master, ddoc_to_compact.name, fragmentation_value,
        self.default_bucket_name)
    # Keep inserting docs until the fragmentation target is reached or the
    # deadline passes.
    end_time = time.time() + self.wait_timeout * 30
    while fragmentation_monitor.state != "FINISHED" and end_time > time.time():
        self.helper.insert_docs(self.num_items, 'spatial-doc',
                                wait_for_persistence=True)
    if end_time < time.time() and fragmentation_monitor.state != "FINISHED":
        self.fail("impossible to reach compaction value after %s sec" %
                  (self.wait_timeout * 30))
    fragmentation_monitor.result()
    compaction_task = self.cluster.async_compact_view(
        self.master, ddoc_to_compact.name, self.default_bucket_name)
    # Perform the ddoc operations while compaction runs in the background.
    self.perform_ddoc_ops(ddocs)
    result = compaction_task.result(self.wait_timeout * 10)
    self.assertTrue(
        result, "Compaction didn't finished correctly. Please check diags")
def test_add_spatial_views_threads(self):
    """Build one design doc per iteration (optionally with identical view
    names) and run perform_ddoc_ops on each from its own thread."""
    same_names = self.input.param('same-name', False)
    num_views_per_ddoc = 10
    ddocs = []
    for i in xrange(num_views_per_ddoc):
        # With same-name enabled every ddoc gets the bare default view name.
        suffix = "" if same_names else str(i)
        spatial_view = View(self.default_view_name + suffix,
                            self.default_map,
                            dev_view=self.use_dev_views,
                            is_spatial=True)
        ddocs.append(DesignDocument(self.default_ddoc_name + str(i), [],
                                    spatial_views=[spatial_view]))
    # update/delete ops need the ddocs to exist up front.
    if self.ddoc_op == 'update' or self.ddoc_op == 'delete':
        self.create_ddocs(ddocs)
    workers = []
    for idx, ddoc in enumerate(ddocs):
        worker = Thread(target=self.perform_ddoc_ops,
                        name="ops_thread" + str(idx),
                        args=([ddoc, ], ))
        workers.append(worker)
        worker.start()
    for worker in workers:
        worker.join()
    if self.thread_crashed.is_set():
        self.fail("Error occured during run")
def test_add_views_to_1_ddoc(self):
    """Concurrently add several spatial views (optionally all with the same
    name) to a single design doc and check for an expected error."""
    same_names = self.input.param('same-name', False)
    error = self.input.param('error', None)
    num_views_per_ddoc = 10
    workers = []
    try:
        for i in xrange(num_views_per_ddoc):
            suffix = "" if same_names else str(i)
            ddoc = DesignDocument(self.default_ddoc_name, [],
                                  spatial_views=[
                                      View(self.default_view_name + suffix,
                                           self.default_map,
                                           dev_view=self.use_dev_views,
                                           is_spatial=True)
                                  ])
            worker = Thread(target=self.create_ddocs,
                            name="create_thread" + str(i),
                            args=([ddoc, ], ))
            workers.append(worker)
            worker.start()
        for worker in workers:
            worker.join()
    except Exception as ex:
        if error and str(ex).find(error) != -1:
            self.log.info("Error caught as expected %s" % error)
            return
        self.fail("Unexpected error appeared during run %s" % ex)
    if error:
        self.fail("Expected error '%s' didn't appear" % error)
def make_ddocs(self, ddocs_num, views_per_ddoc):
    """Append ddocs_num design docs to self.ddocs, each holding
    views_per_ddoc default views with distinct map functions."""
    ddoc_name = "compaction_ddoc"
    view_name = "compaction_view"
    for idx in xrange(ddocs_num):
        default_views = self.make_default_views(view_name, views_per_ddoc,
                                                different_map=True)
        self.ddocs.append(
            DesignDocument(ddoc_name + str(idx), default_views))
def create_default_views(self, is_one_ddoc=False):
    """Create the five default spatial views — all in one design doc when
    is_one_ddoc is True, otherwise one view per design doc — push them to
    the cluster, and return the list of design docs."""
    # The five views differ only in the value each map function emits.
    emit_values = ['doc.age',
                   'null',
                   'doc.name',
                   '[doc.name, doc.age]',
                   '{result : {age:doc.age}}']
    views = [View(self.testcase.default_view_name + str(i),
                  'function (doc) {emit(doc.geometry, %s);}' % value,
                  dev_view=self.testcase.use_dev_views,
                  is_spatial=True)
             for i, value in enumerate(emit_values)]
    ddocs = []
    if is_one_ddoc:
        ddocs.append(DesignDocument(self.testcase.default_ddoc_name, [],
                                    spatial_views=views))
    else:
        for i in xrange(5):
            ddocs.append(DesignDocument(self.testcase.default_ddoc_name + str(i),
                                        [], spatial_views=[views[i]]))
    for ddoc in ddocs:
        for view in ddoc.spatial_views:
            self.testcase.cluster.create_view(
                self.testcase.master, ddoc.name, view,
                bucket=self.testcase.bucket_name)
    return ddocs
def test_add_spatial_views_case_sensative(self):
    """Two spatial views whose names differ only by letter case must be able
    to coexist in the same design doc."""
    lower_case_view = View(self.default_view_name, self.default_map,
                           dev_view=self.use_dev_views, is_spatial=True)
    upper_case_view = View(self.default_view_name.upper(), self.default_map,
                           dev_view=self.use_dev_views, is_spatial=True)
    ddoc = DesignDocument(self.default_ddoc_name, [],
                          spatial_views=[lower_case_view, upper_case_view])
    self.create_ddocs([ddoc])
def _verify_ddoc_ops_all_buckets(self):
    """Verify that every view name tracked in self.bucket_ddoc_map still
    exists in its design document; fail on the first missing view or ddoc."""
    rest = RestConnection(self.servers[0])
    for bucket, self.ddoc_view_map in self.bucket_ddoc_map.items():
        for ddoc_name, self.view_name_list in self.ddoc_view_map.items():
            try:
                # NOTE(review): sibling implementations of this check unpack
                # `ddoc_json, header = rest.get_ddoc(...)` — confirm this
                # RestConnection.get_ddoc returns the json body alone.
                ddoc_json = rest.get_ddoc(bucket, ddoc_name)
                self.log.info('Document {0} details : {1}'.format(ddoc_name, json.dumps(ddoc_json)))
                ddoc = DesignDocument._init_from_json(ddoc_name, ddoc_json)
                for view_name in self.view_name_list:
                    if view_name not in [view.name for view in ddoc.views]:
                        self.fail("Validation Error: View - {0} in Design Doc - {1} and Bucket - {2} is missing".format(view_name, ddoc_name, bucket))
            except ReadDocumentException:
                self.fail("Validation Error: Design Document - {0} is missing".format(ddoc_name))
def _verify_ddoc_ops_all_buckets(self):
    """Check that each view recorded in self.bucket_ddoc_map is present in
    its design document on the server."""
    self.log.info("DDoc Validation Started")
    rest = RestConnection(self.master)
    # Iterate over all the DDocs/Views stored in the internal dictionary
    for bucket, self.ddoc_view_map in self.bucket_ddoc_map.items():
        for ddoc_name, view_list in self.ddoc_view_map.items():
            try:
                # fetch the DDoc information from the database
                ddoc_json, header = rest.get_ddoc(bucket, ddoc_name)
                self.log.info('Database Document {0} details : {1}'.format(ddoc_name,
                                                                           json.dumps(ddoc_json)))
                ddoc = DesignDocument._init_from_json(ddoc_name, ddoc_json)
                present = [v.name for v in ddoc.views]
                missing = [view.name for view in view_list
                           if view.name not in present]
                if missing:
                    # Report the first missing view, as the original did.
                    self.fail("Validation Error: View - {0} in Design Doc - {1} and Bucket - {2} is missing from database".format(missing[0], ddoc_name, bucket))
            except ReadDocumentException:
                self.fail("Validation Error: Design Document - {0} is missing from Bucket - {1}".format(ddoc_name, bucket))
    self.log.info("DDoc Validation Successful")
def create_user_test_ddoc_check(self):
    """Create a design doc as admin, switch to a read-only REST user, and
    verify the read-only user cannot delete the view.

    Fixes: the Python-2-only ``except Exception, ex`` syntax is replaced
    with the portable form, and the bucket's ``authType`` is restored in a
    ``finally`` block so it is reset even on unexpected exit paths.
    """
    rest = RestConnection(self.master)
    ddoc = DesignDocument("ddoc_ro_0",
                          [View("ro_view",
                                "function (doc) {\n  emit(doc._id, doc);\n}",
                                dev_view=False)])
    rest.create_design_document(self.buckets[0], ddoc)
    # Create the read-only user and re-authenticate as it.
    rest.create_ro_user(username=self.username, password=self.password)
    self.master.rest_username = self.username
    self.master.rest_password = self.password
    rest = RestConnection(self.master)
    self.log.info("Try to delete ddoc")
    self.buckets[0].authType = ""
    try:
        rest.delete_view(self.buckets[0], ddoc.views[0])
    except Exception:
        # Deletion is expected to be rejected for the read-only user.
        self.log.info("Unable to delete ddoc. Expected")
    finally:
        self.buckets[0].authType = "sasl"
def rebalance_in_with_ddoc_compaction(self):
    """While a rebalance-in is running, drive a design doc to a target
    fragmentation level (by updating docs and querying its views), then
    compact it and assert the compaction succeeded."""
    fragmentation_value = self.input.param("fragmentation_value", 80)
    is_dev_ddoc = False
    ddoc_name = "ddoc_compaction"
    map_fn_2 = "function (doc) { if (doc.first_name == 'sharon') {emit(doc.age, doc.first_name);}}"
    ddoc = DesignDocument(ddoc_name, [View(ddoc_name + "0", self.default_map_func,
                                           None, dev_view=is_dev_ddoc),
                                      View(ddoc_name + "1", map_fn_2,
                                           None, dev_view=is_dev_ddoc)])
    # Dev ddocs are addressed with a "dev_" prefix over REST.
    prefix = ("", "dev_")[is_dev_ddoc]
    query = {"connectionTimeout": 60000}
    self.disable_compaction()
    for view in ddoc.views:
        self.cluster.create_view(self.master, ddoc.name, view,
                                 bucket=self.default_bucket_name)
    generator = self._load_doc_data_all_buckets()
    RebalanceHelper.wait_for_persistence(self.master, self.default_bucket_name)
    # generate load until fragmentation reached
    rebalance = self.cluster.async_rebalance([self.master],
                                             self.servers[1:self.nodes_in + 1], [])
    # Repeat the fragmentation/monitor cycle until the rebalance completes.
    while rebalance.state != "FINISHED":
        fragmentation_monitor = self.cluster.async_monitor_view_fragmentation(self.master,
                                                                              prefix + ddoc_name,
                                                                              fragmentation_value,
                                                                              self.default_bucket_name)
        end_time = time.time() + self.wait_timeout * 30
        while fragmentation_monitor.state != "FINISHED" and end_time > time.time():
            # update docs to create fragmentation
            self._load_doc_data_all_buckets("update", gen_load=generator)
            for view in ddoc.views:
                # run queries to create indexes
                self.cluster.query_view(self.master, prefix + ddoc_name, view.name, query)
        if end_time < time.time() and fragmentation_monitor.state != "FINISHED":
            # NOTE(review): this message says wait_timeout * 20 but the
            # deadline above is wait_timeout * 30 — confirm intended value.
            self.fail("impossible to reach compaction value after %s sec"
                      % (self.wait_timeout * 20))
        fragmentation_monitor.result()
        compaction_task = self.cluster.async_compact_view(self.master, prefix + ddoc_name,
                                                          self.default_bucket_name,
                                                          with_rebalance=True)
        result = compaction_task.result(self.wait_timeout * 10)
        self.assertTrue(result, "Compaction didn't finished correctly. Please check diags")
    rebalance.result()
def make_ddocs(self, ddocs_num, views_per_ddoc, non_spatial_views_per_ddoc):
    """Build and return ddocs_num design docs, each carrying views_per_ddoc
    spatial views and non_spatial_views_per_ddoc regular views."""
    ddocs = []
    for i in xrange(ddocs_num):
        spatial_views = [View(self.default_view_name + str(k),
                              self.default_map,
                              dev_view=self.use_dev_views,
                              is_spatial=True)
                         for k in xrange(views_per_ddoc)]
        regular_views = []
        if non_spatial_views_per_ddoc:
            regular_views = [View(self.default_view_name + str(k),
                                  'function (doc) { emit(null, doc);}',
                                  dev_view=self.use_dev_views)
                             for k in xrange(non_spatial_views_per_ddoc)]
        ddocs.append(DesignDocument(self.default_ddoc_name + str(i),
                                    regular_views,
                                    spatial_views=spatial_views))
    return ddocs