def push_to_dgm(self, bucket, dgm_percent):
     doc_size = 1024
     curr_active = self.bucket_stat('vb_active_perc_mem_resident', bucket)
     total_items = self.bucket_stat('curr_items', bucket)
     batch_items = 20000
     # keep loading docs until the bucket is in DGM (active resident ratio below dgm_percent)
     while curr_active > dgm_percent:
         curr_items = self.bucket_stat('curr_items', bucket)
         gen_create = BlobGenerator('dgmkv',
                                    'dgmkv-',
                                    doc_size,
                                    start=curr_items + 1,
                                    end=curr_items + batch_items)
         total_items += batch_items
         try:
             self.cluster.load_gen_docs(self.master,
                                        bucket,
                                        gen_create,
                                        self.buckets[0].kvs[1],
                                        'create',
                                        exp=0,
                                        flag=0,
                                        batch_size=1000)
         except Exception as e:
             # ignore transient load failures (e.g. temporary OOM) and keep loading
             log.info("ignoring load error while pushing to DGM: {0}".format(e))
         curr_active = self.bucket_stat('vb_active_perc_mem_resident',
                                        bucket)
     log.info("bucket {0} in DGM, resident_ratio : {1}%".format(
         bucket, curr_active))
     total_items = self.bucket_stat('curr_items', bucket)
     return total_items
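A minimal usage sketch, assuming push_to_dgm is mixed into one of the eventing test classes below; the bucket name "default" and the 50% resident-ratio target are illustrative assumptions, and only methods already shown in this section are used.
 def test_eventing_with_bucket_in_dgm(self):
     # push the source bucket below a 50% active resident ratio before deploying
     # ("default" and 50 are assumptions, not values taken from the tests above)
     total_items = self.push_to_dgm("default", 50)
     body = self.create_save_function_body(self.function_name, self.handler_code)
     self.deploy_function(body)
     # the handler is assumed to produce one result per loaded document
     self.verify_eventing_results(self.function_name, total_items, skip_stats_validation=True)
     self.undeploy_and_delete_function(body)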
 def test_functions_where_dataset_has_binary_and_non_json_data(self):
     gen_load_binary = BlobGenerator('binary',
                                     'binary-',
                                     self.value_size,
                                     end=2016 * self.docs_per_day)
     values = ['1', '10']
     gen_load_non_json = JSONNonDocGenerator('non_json_docs',
                                             values,
                                             start=0,
                                             end=2016 * self.docs_per_day)
     gen_load_non_json_del = copy.deepcopy(gen_load_non_json)
     # load binary and non json data
     self.cluster.load_gen_docs(self.master, self.src_bucket_name,
                                gen_load_binary, self.buckets[0].kvs[1],
                                'create')
     self.cluster.load_gen_docs(self.master, self.src_bucket_name,
                                gen_load_non_json, self.buckets[0].kvs[1],
                                'create')
     body = self.create_save_function_body(self.function_name,
                                           self.handler_code)
     self.deploy_function(body)
     # Wait for eventing to catch up with all the create mutations and verify results
     self.verify_eventing_results(self.function_name,
                                  self.docs_per_day * 2016,
                                  skip_stats_validation=True)
     # delete non json documents
     self.cluster.load_gen_docs(self.master, self.src_bucket_name,
                                gen_load_non_json_del,
                                self.buckets[0].kvs[1], 'delete')
     # Wait for eventing to catch up with all the delete mutations and verify results
     self.verify_eventing_results(self.function_name,
                                  0,
                                  skip_stats_validation=True)
     self.undeploy_and_delete_function(body)
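For reference, a sketch of the kind of key/value stream a non-JSON generator such as the one above is meant to emit; this is illustrative only and not the actual JSONNonDocGenerator implementation.
def non_json_pairs(prefix, values, start, end):
    # illustrative only: raw string values such as '1' and '10' stand in for
    # non-JSON document bodies, mirroring gen_load_non_json above
    for i in range(start, end):
        yield "{0}{1}".format(prefix, i), values[i % len(values)]

# e.g. list(non_json_pairs('non_json_docs', ['1', '10'], 0, 3))
# -> [('non_json_docs0', '1'), ('non_json_docs1', '10'), ('non_json_docs2', '1')]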
Example #3
 def test_functions_where_documents_change_from_binary_to_json_data(self):
     gen_load_binary = BlobGenerator('binary1000000', 'binary', self.value_size, start=1,
                                     end=2016 * self.docs_per_day + 1)
     gen_load_json = JsonDocGenerator('binary', op_type="create", end=2016 * self.docs_per_day)
     gen_load_binary_del = copy.deepcopy(gen_load_binary)
     gen_load_json_del = copy.deepcopy(gen_load_json)
     # load binary data
     self.cluster.load_gen_docs(self.master, self.src_bucket_name, gen_load_binary, self.buckets[0].kvs[1], "create",
                                exp=0, flag=0, batch_size=1000, compression=self.sdk_compression)
     body = self.create_save_function_body(self.function_name, self.handler_code)
     self.deploy_function(body)
     # convert data from binary to json
     # use the same doc IDs as the binary docs so they are updated from binary to json
     self.cluster.load_gen_docs(self.master, self.src_bucket_name, gen_load_binary_del, self.buckets[0].kvs[1],
                                'delete', batch_size=1000, compression=self.sdk_compression)
     self.cluster.load_gen_docs(self.master, self.src_bucket_name, gen_load_json, self.buckets[0].kvs[1], 'create',
                                batch_size=1000, compression=self.sdk_compression)
     # Wait for eventing to catch up with all the update mutations and verify results
     self.verify_eventing_results(self.function_name, self.docs_per_day * 2016, skip_stats_validation=True)
     # delete all json docs
     self.cluster.load_gen_docs(self.master, self.src_bucket_name, gen_load_json_del, self.buckets[0].kvs[1],
                                'delete', batch_size=1000, compression=self.sdk_compression)
     # Wait for eventing to catch up with all the delete mutations and verify results
     self.verify_eventing_results(self.function_name, 0, skip_stats_validation=True)
     self.undeploy_and_delete_function(body)
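The delete-then-recreate pair above is what performs the in-place conversion; as a sketch, the same two calls could be factored into a helper like the following (the helper name is an assumption, the calls are exactly those used in the test).
 def convert_binary_docs_to_json(self, bucket_name, gen_binary_del, gen_json):
     # drop the binary values first, then recreate the same keys as json docs
     self.cluster.load_gen_docs(self.master, bucket_name, gen_binary_del, self.buckets[0].kvs[1],
                                'delete', batch_size=1000, compression=self.sdk_compression)
     self.cluster.load_gen_docs(self.master, bucket_name, gen_json, self.buckets[0].kvs[1],
                                'create', batch_size=1000, compression=self.sdk_compression)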
Example #4
 def test_functions_where_dataset_has_binary_and_json_data(self):
     gen_load = BlobGenerator('binary',
                              'binary-',
                              self.value_size,
                              end=2016 * self.docs_per_day)
     # load binary and json data
     self.cluster.load_gen_docs(self.master,
                                self.src_bucket_name,
                                gen_load,
                                self.buckets[0].kvs[1],
                                'create',
                                exp=0,
                                flag=0,
                                batch_size=1000,
                                compression=self.sdk_compression)
     self.load(self.gens_load,
               buckets=self.src_bucket,
               flag=self.item_flag,
               verify_data=False,
               batch_size=self.batch_size)
     body = self.create_save_function_body(self.function_name,
                                           self.handler_code)
     self.deploy_function(body)
     # Wait for eventing to catch up with all the create mutations and verify results
     self.verify_eventing_results(self.function_name,
                                  self.docs_per_day * 2016,
                                  skip_stats_validation=True)
     # delete both binary and json documents
     self.cluster.load_gen_docs(self.master,
                                self.src_bucket_name,
                                gen_load,
                                self.buckets[0].kvs[1],
                                'delete',
                                exp=0,
                                flag=0,
                                batch_size=1000,
                                compression=self.sdk_compression)
     self.load(self.gens_load,
               buckets=self.src_bucket,
               flag=self.item_flag,
               verify_data=False,
               batch_size=self.batch_size,
               op_type='delete')
     # Wait for eventing to catch up with all the delete mutations and verify results
     self.verify_eventing_results(self.function_name,
                                  0,
                                  skip_stats_validation=True)
     self.undeploy_and_delete_function(body)
Example #5
 def test_read_binary_data_from_the_function(self):
     gen_load_binary = BlobGenerator('binary1000000',
                                     'binary',
                                     self.value_size,
                                     start=1,
                                     end=2016 * self.docs_per_day + 1)
     gen_load_json = JsonDocGenerator('binary',
                                      op_type="create",
                                      end=2016 * self.docs_per_day)
     # load json docs on the src bucket and binary docs with identical keys on the dst bucket so the handler can try to read them
     self.cluster.load_gen_docs(self.master,
                                self.src_bucket_name,
                                gen_load_json,
                                self.buckets[0].kvs[1],
                                "create",
                                exp=0,
                                flag=0,
                                batch_size=1000,
                                compression=self.sdk_compression)
     self.cluster.load_gen_docs(self.master,
                                self.dst_bucket_name,
                                gen_load_binary,
                                self.buckets[0].kvs[1],
                                "create",
                                exp=0,
                                flag=0,
                                batch_size=1000,
                                compression=self.sdk_compression)
     body = self.create_save_function_body(
         self.function_name, HANDLER_CODE.READ_BUCKET_OP_ON_DST)
     self.deploy_function(body)
     # wait for some time so that bucket_op_exception_count increases,
     # since the handler code cannot read binary data from the bucket
     self.sleep(60)
     stats = self.rest.get_all_eventing_stats()
     bucket_op_exception_count = stats[0]["failure_stats"][
         "bucket_op_exception_count"]
     self.undeploy_and_delete_function(body)
     log.info("stats : {0}".format(
         json.dumps(stats, sort_keys=True, indent=4)))
     if bucket_op_exception_count == 0:
         self.fail("Reading binary data from the handler code did not raise any bucket op exceptions")
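A fixed 60-second sleep can be flaky; as a sketch, a polling helper built only from calls already used in this test (self.rest.get_all_eventing_stats, self.sleep) could wait on the exception counter instead. The helper name, timeout, and interval are assumptions.
 def wait_for_bucket_op_exceptions(self, timeout=120, interval=5):
     # sketch only: poll failure_stats until bucket_op_exception_count is non-zero
     # or the (assumed) timeout expires, then return the latest stats
     import time  # kept local so the sketch stays self-contained
     end_time = time.time() + timeout
     while time.time() < end_time:
         stats = self.rest.get_all_eventing_stats()
         if stats and stats[0]["failure_stats"]["bucket_op_exception_count"] > 0:
             return stats
         self.sleep(interval)
     return self.rest.get_all_eventing_stats()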