def _update_data(self, number_of_items, db_name):
    """Overwrite docs in *db_name* with a regenerated, updated payload.

    Generates `number_of_items` docs (128-byte payload, seeded with
    ``self.seed`` so the same doc ids are produced) and force-saves each
    one so existing revisions are replaced.
    """
    updated_docs = DocumentGenerator.make_docs(
        number_of_items,
        {"name": "user-${prefix}",
         "payload": "updated-payload-${prefix}-${padding}"},
        {"size": 128, "seed": self.seed})
    target_db = self.server[db_name]
    for updated in updated_docs:
        # force_update replaces the existing revision of each doc
        target_db.save_doc(updated, force_update=True)
    log.info("updated {0} items".format(number_of_items))
def _get_db_and_generated_docs(self, number_of_items=100, doc_size=1024):
    """Create a fresh database and generate docs for it.

    Generalized: the item count and payload size were hard-coded
    (100 items, 1024 bytes); they are now keyword parameters whose
    defaults preserve the previous behavior for existing callers.

    Args:
        number_of_items: how many documents to generate (default 100).
        doc_size: approximate payload size in bytes per doc (default 1024).

    Returns:
        (src_db, docs) — the newly created database handle and the
        generated documents. NOTE: the docs are only generated here,
        not saved; the caller is responsible for persisting them.
    """
    db_name = self._get_db_name()
    self.server.create_db(db_name)
    src_db = self.server[db_name]
    # random seed => different doc contents on every call
    docs = DocumentGenerator.make_docs(
        number_of_items,
        {"name": "user-${prefix}", "payload": "payload-${prefix}-${padding}"},
        {"size": doc_size, "seed": str(uuid.uuid4())})
    return src_db, docs
def _replication(self, items, replication_type, destination, doc_size):
    """Insert docs into a local source db, replicate them to *destination*,
    then cancel the replication and verify every doc arrived.

    Args:
        items: number of documents to insert into the source db.
        replication_type: e.g. "filtered" — selects a filtered replication
            via the `simple_filter/all_docs_filter` design doc; any other
            value runs an unfiltered continuous replication.
        destination: dict with at least 'ip' and 'port' of the target server.
        doc_size: approximate payload size in bytes per generated doc.

    Fixes vs. original: the *destination* dict parameter is no longer
    rebound to a URL string (shadowing), and the unused assignment of the
    cancel-replication result was dropped. Behavior is otherwise unchanged.

    NOTE(review): control flow was reconstructed from a whitespace-mangled
    source; `_wait_for_replication` is assumed to run at branch level
    (not only when a `_local_id` was returned) — confirm against history.
    """
    src_db_name = self._get_db_name(replication_type)
    self.server.create_db(src_db_name)
    dst_db_name = self._get_db_name(replication_type)
    url = "http://{0}:{1}/".format(destination['ip'], destination['port'])
    self.dst_server = client.Server(url, full_commit=False)
    self.dst_server.create_db(dst_db_name)
    description = "insert {0} items in source database.start replication from {1}(db:{2}) to {3}(db:{4})"
    self.log.info(description.format(items, config["couchdb-local"]["ip"],
                                     src_db_name, destination["ip"], dst_db_name))
    docs = DocumentGenerator.make_docs(
        items,
        {"name": "user-${prefix}", "payload": "payload-${prefix}-${padding}"},
        {"size": doc_size})
    src_db = self.server[src_db_name]
    for doc in docs:
        src_db.save_doc(doc)
    self.log.info("saved {0} docs".format(len(docs)))
    # Build replication endpoint URLs in their own variables instead of
    # rebinding the `destination` parameter (previous shadowing bug-risk).
    source_url = "http://{0}:{1}/{2}".format(self.node["ip"],
                                             self.node["port"], src_db_name)
    destination_url = "http://{0}:{1}/{2}".format(destination["ip"],
                                                  destination["port"], dst_db_name)
    self.log.info("starting the replication")
    filter_fn = '''function(doc) { if(doc.name != null) { return true; } else { return false; } }'''
    design_doc = {"_id": "_design/simple_filter",
                  "filters": {"all_docs_filter": filter_fn}}
    self.server[src_db_name].save_doc(design_doc)
    replication_task_id = ""
    if replication_type == "filtered":
        data = self.server.replicate(source_url, destination_url,
                                     filter="simple_filter/all_docs_filter",
                                     continuous=True)
        self.log.info(data)
        if data and "_local_id" in data:
            replication_task_id = data["_local_id"]
        self._wait_for_replication(self.server, replication_task_id, 120)
        self.server.replicate(source_url, destination_url,
                              filter="simple_filter/all_docs_filter",
                              continuous=True, cancel=True)
    else:
        data = self.server.replicate(source_url, destination_url,
                                     continuous=True)
        self.log.info(data)
        if data and "_local_id" in data:
            replication_task_id = data["_local_id"]
        self._wait_for_replication(self.server, replication_task_id, 120)
        # now let's cancel the replication (result intentionally ignored)
        self.server.replicate(source_url, destination_url,
                              continuous=True, cancel=True)
    self.log.info("replication completed")
    # verify that all the data is replicated
    tools.ok_(self._verify_replication(self.server, src_db_name,
                                       self.dst_server, dst_db_name, docs),
              msg="replication did not replicate some items")
def test_make_docs_1(self):
    """Smoke test: generate 10 docs and log each one."""
    log = logger.logger("test_make_docs_1")
    generated = DocumentGenerator.make_docs(10, {"name": "employee-${prefix}"})
    for item in generated:
        log.info(item)