def __populate_create_gen(self):
    start = 0
    if not self.collection_index:
        self.create_gen = JsonDocGenerator(name="emp",
                                           encoding="utf-8",
                                           start=start,
                                           end=start + self._num_items)
    else:
        # wire up Elasticsearch comparison parameters only when requested
        elastic_ip = None
        elastic_port = None
        elastic_username = None
        elastic_password = None
        if self.compare_es:
            elastic_ip = self.elastic_node.ip
            elastic_port = self.elastic_node.port
            elastic_username = self.elastic_node.es_username
            elastic_password = self.elastic_node.es_password
        self.create_gen = SDKDataLoader(num_ops=self._num_items,
                                        percent_create=100,
                                        percent_update=0,
                                        percent_delete=0,
                                        scope=self.scope,
                                        collection=self.collections,
                                        json_template="emp",
                                        start=start,
                                        end=start + self._num_items,
                                        es_compare=self.compare_es,
                                        es_host=elastic_ip,
                                        es_port=elastic_port,
                                        es_login=elastic_username,
                                        es_password=elastic_password,
                                        key_prefix="emp_",
                                        upd_del_shift=self._num_items)
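# Illustrative note (the exact SDKDataLoader key format is an assumption):
# with self._num_items = 1000, the collection path above creates documents
# keyed with the "emp_" prefix in self.scope/self.collections, and
# upd_del_shift presumably offsets any later update/delete key ranges by the
# same 1000 documents so they stay clear of the create range.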
def test_functions_where_documents_change_from_binary_to_json_data(self):
    gen_load_binary = BlobGenerator('binary1000000', 'binary', self.value_size,
                                    start=1, end=2016 * self.docs_per_day + 1)
    gen_load_json = JsonDocGenerator('binary', op_type="create",
                                     end=2016 * self.docs_per_day)
    gen_load_binary_del = copy.deepcopy(gen_load_binary)
    gen_load_json_del = copy.deepcopy(gen_load_json)
    # load binary data
    self.cluster.load_gen_docs(self.master, self.src_bucket_name, gen_load_binary,
                               self.buckets[0].kvs[1], "create", exp=0, flag=0,
                               batch_size=1000, compression=self.sdk_compression)
    body = self.create_save_function_body(self.function_name, self.handler_code)
    self.deploy_function(body)
    # convert data from binary to json:
    # reuse the same doc ids as the binary load, deleting binary docs and
    # re-creating them as json
    self.cluster.load_gen_docs(self.master, self.src_bucket_name, gen_load_binary_del,
                               self.buckets[0].kvs[1], 'delete',
                               batch_size=1000, compression=self.sdk_compression)
    self.cluster.load_gen_docs(self.master, self.src_bucket_name, gen_load_json,
                               self.buckets[0].kvs[1], 'create',
                               batch_size=1000, compression=self.sdk_compression)
    # wait for eventing to catch up with all the update mutations and verify results
    self.verify_eventing_results(self.function_name, self.docs_per_day * 2016,
                                 skip_stats_validation=True)
    # delete all json docs
    self.cluster.load_gen_docs(self.master, self.src_bucket_name, gen_load_json_del,
                               self.buckets[0].kvs[1], 'delete',
                               batch_size=1000, compression=self.sdk_compression)
    # wait for eventing to catch up with all the delete mutations and verify results
    self.verify_eventing_results(self.function_name, 0, skip_stats_validation=True)
    self.undeploy_and_delete_function(body)
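# Note: per the in-test comments above, the delete + create pair reuses the
# binary documents' ids, so eventing observes a DELETE mutation followed by a
# fresh INSERT for every document rather than an in-place update; the first
# verify_eventing_results call therefore expects the full doc count again.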
def __populate_delete_gen(self):
    if self.collection_index:
        self.delete_gen = copy.deepcopy(self.create_gen)
        self.delete_gen.op_type = "delete"
        self.delete_gen.encoding = "utf-8"
        # delete the trailing slice: from the 30% mark through the end
        self.delete_gen.start = int(self.create_gen.end * 30 / 100)
        self.delete_gen.end = self.create_gen.end
        self.delete_gen.delete()
    else:
        self.delete_gen = JsonDocGenerator(self.create_gen.name,
                                           op_type="delete",
                                           encoding="utf-8",
                                           start=int(self.create_gen.end * 30 / 100),
                                           end=self.create_gen.end)
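# Worked example (illustrative): with create_gen.end == 1000, start becomes
# int(1000 * 30 / 100) == 300, so the deletes cover the range 300 .. 1000,
# i.e. the last 70% of the created documents.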
def test_read_binary_data_from_the_function(self):
    gen_load_binary = BlobGenerator('binary1000000', 'binary', self.value_size,
                                    start=1, end=2016 * self.docs_per_day + 1)
    gen_load_json = JsonDocGenerator('binary', op_type="create",
                                     end=2016 * self.docs_per_day)
    # load json docs on the src bucket and binary docs on the dst bucket with
    # identical keys so that the handler can try to read them
    self.cluster.load_gen_docs(self.master, self.src_bucket_name, gen_load_json,
                               self.buckets[0].kvs[1], "create", exp=0, flag=0,
                               batch_size=1000, compression=self.sdk_compression)
    self.cluster.load_gen_docs(self.master, self.dst_bucket_name, gen_load_binary,
                               self.buckets[0].kvs[1], "create", exp=0, flag=0,
                               batch_size=1000, compression=self.sdk_compression)
    body = self.create_save_function_body(self.function_name,
                                          HANDLER_CODE.READ_BUCKET_OP_ON_DST)
    self.deploy_function(body)
    # wait for some time so that bucket_op_exception_count increases,
    # since handler code cannot read binary data
    self.sleep(60)
    stats = self.rest.get_all_eventing_stats()
    bucket_op_exception_count = stats[0]["failure_stats"]["bucket_op_exception_count"]
    self.undeploy_and_delete_function(body)
    log.info("stats : {0}".format(json.dumps(stats, sort_keys=True, indent=4)))
    if bucket_op_exception_count == 0:
        self.fail("Reading binary data succeeded from handler code")
def __populate_delete_gen(self):
    # delete the last 30% of the created docs: from the 70% mark to the end
    self.delete_gen = JsonDocGenerator(self.create_gen.name,
                                       op_type="delete",
                                       start=int(self.create_gen.end * (100 - 30) / 100),
                                       end=self.create_gen.end)
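# Worked example (illustrative): with create_gen.end == 1000, start becomes
# int(1000 * 70 / 100) == 700, so this generator deletes the range
# 700 .. 1000, i.e. the last 30% of the created documents.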
def __populate_create_gen(self):
    start = 0
    self.create_gen = JsonDocGenerator(name="emp",
                                       encoding="utf-8",
                                       start=start,
                                       end=start + self._num_items)
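# Hedged usage sketch (hypothetical, not part of the suite): the two helpers
# are meant to be paired, with the delete generator slicing the tail of the
# key range the create generator produced, e.g.:
#
#   self._num_items = 1000
#   self.__populate_create_gen()   # emp docs 0 .. 999
#   self.__populate_delete_gen()   # tail slice of the same key range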