class TestPartitionOperations(TestcaseBase):
    """ Test case of partition interface in operations """

    @pytest.mark.tags(CaseLabel.L1)
    # @pytest.mark.parametrize("partition_name", [cf.gen_unique_str(prefix)])
    def test_partition_dropped_collection(self):
        """
        target: verify create partition against a dropped collection
        method: 1. create collection1
                2. drop collection1
                3. create partition in collection1
        expected: 1. raise exception
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # drop collection
        collection_w.drop()

        # create partition failed: the owning collection no longer exists
        self.partition_wrap.init_partition(collection_w.collection,
                                           cf.gen_unique_str(prefix),
                                           check_task=CheckTasks.err_res,
                                           check_items={
                                               ct.err_code: 1,
                                               ct.err_msg: "can't find collection"
                                           })

    @pytest.mark.tags(CaseLabel.L2)
    # @pytest.mark.parametrize("partition_name", [cf.gen_unique_str(prefix)])
    def test_partition_same_name_in_diff_collections(self):
        """
        target: verify create partitions with same name in diff collections
        method: 1. create a partition in collection1
                2. create a partition in collection2
        expected: 1. create successfully
        """
        # create two collections
        collection_w1 = self.init_collection_wrap()
        collection_w2 = self.init_collection_wrap()

        # create 2 partitions with the same name in 2 diff collections
        partition_name = cf.gen_unique_str(prefix)
        self.init_partition_wrap(collection_wrap=collection_w1, name=partition_name)
        self.init_partition_wrap(collection_wrap=collection_w2, name=partition_name)

        # check result: both collections report the partition
        assert collection_w1.has_partition(partition_name)[0]
        assert collection_w2.has_partition(partition_name)[0]

    @pytest.mark.tags(CaseLabel.L2)
    def test_partition_multi_partitions_in_collection(self):
        """
        target: verify create multiple partitions in one collection
        method: 1. create multiple partitions in one collection
        expected: 1. create successfully
        """
        # create collection
        collection_w = self.init_collection_wrap()

        for _ in range(10):
            partition_name = cf.gen_unique_str(prefix)
            # create partition with different names and check the partition exists
            self.init_partition_wrap(collection_w, partition_name)
            assert collection_w.has_partition(partition_name)[0]

    @pytest.mark.tags(CaseLabel.L2)
    @pytest.mark.skip(reason="skip for memory issue check")
    def test_partition_maximum_partitions(self):
        """
        target: verify create maximum partitions
        method: 1. create maximum partitions
                2. create one more partition
        expected: 1. raise exception
        """
        threads_num = 8
        threads = []

        def create_partition(collection, threads_n):
            # each thread creates its share of ct.max_partition_num partitions
            for _ in range(ct.max_partition_num // threads_n):
                name = cf.gen_unique_str(prefix)
                par_wrap = ApiPartitionWrapper()
                par_wrap.init_partition(collection, name,
                                        check_task=CheckTasks.check_nothing)

        collection_w = self.init_collection_wrap()
        for _ in range(threads_num):
            t = threading.Thread(target=create_partition,
                                 args=(collection_w.collection, threads_num))
            threads.append(t)
            t.start()
        for t in threads:
            t.join()

        # one more partition beyond the maximum must be rejected
        p_name = cf.gen_unique_str()
        self.partition_wrap.init_partition(
            collection_w.collection, p_name,
            check_task=CheckTasks.err_res,
            check_items={
                ct.err_code: 1,
                ct.err_msg: "maximum partition's number should be limit to 4096"
            })

    @pytest.mark.tags(CaseLabel.L0)
    def test_partition_drop_default_partition(self):
        """
        target: verify drop the _default partition
        method: 1. drop the _default partition
        expected: 1. raise exception
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # get the default partition
        default_partition, _ = collection_w.partition(
            ct.default_partition_name)
        partition_w = self.init_partition_wrap(collection_w,
                                               ct.default_partition_name)
        assert default_partition.name == partition_w.name

        # verify that drop partition with error
        partition_w.drop(check_task=CheckTasks.err_res,
                         check_items={
                             ct.err_code: 1,
                             ct.err_msg: "default partition cannot be deleted"
                         })

    @pytest.mark.tags(CaseLabel.L1)
    # @pytest.mark.parametrize("partition_name", [cf.gen_unique_str(prefix)])
    def test_partition_drop_partition_twice(self):
        """
        target: verify drop the same partition twice
        method: 1. create a partition with default schema
                2. drop the partition
                3. drop the same partition again
        expected: raise exception when 2nd time
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # create partition
        partition_name = cf.gen_unique_str(prefix)
        partition_w = self.init_partition_wrap(collection_w, partition_name)
        collection_w.has_partition(partition_name)

        # drop partition
        partition_w.drop()
        assert not collection_w.has_partition(partition_name)[0]

        # verify that drop the partition again with exception
        partition_w.drop(check_task=CheckTasks.err_res,
                         check_items={
                             ct.err_code: 1,
                             ct.err_msg: PartitionErrorMessage.PartitionNotExist
                         })

    @pytest.mark.tags(CaseLabel.L2)
    # @pytest.mark.parametrize("partition_name", [cf.gen_unique_str(prefix)])
    def test_partition_create_and_drop_multi_times(self):
        """
        target: verify create and drop for times
        method: 1. create a partition with default schema
                2. drop the partition
                3. loop #1 and #2 for times
        expected: create and drop successfully
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # range for 5 times
        partition_name = cf.gen_unique_str(prefix)
        for i in range(5):
            # create partition and check that the partition exists
            partition_w = self.init_partition_wrap(collection_w, partition_name)
            assert collection_w.has_partition(partition_name)[0]

            # drop partition and check that the partition not exists
            partition_w.drop()
            assert not collection_w.has_partition(partition_name)[0]

    @pytest.mark.tags(CaseLabel.L2)
    # @pytest.mark.parametrize("flush", [True, False])
    # @pytest.mark.parametrize("partition_name", [cf.gen_unique_str(prefix)])
    def test_partition_drop_non_empty_partition(self):
        """
        target: verify drop a partition which has data inserted
        method: 1. create a partition with default schema
                2. insert some data
                3. flush / not flush
                4. drop the partition
        expected: drop successfully
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # create partition
        partition_name = cf.gen_unique_str(prefix)
        partition_w = self.init_partition_wrap(collection_w, partition_name)
        assert collection_w.has_partition(partition_name)[0]

        # insert data to partition
        partition_w.insert(cf.gen_default_dataframe_data())

        # # flush  remove flush for issue #5837
        # if flush:
        #     self._connect().flush([collection_w.name])

        # drop partition
        partition_w.drop()
        assert not collection_w.has_partition(partition_name)[0]

    @pytest.mark.tags(CaseLabel.L2)
    # @pytest.mark.parametrize("flush", [True, False])
    @pytest.mark.parametrize("data", [cf.gen_default_list_data(nb=3000)])
    @pytest.mark.parametrize("index_param", cf.gen_simple_index())
    def test_partition_drop_indexed_partition(self, data, index_param):
        """
        target: verify drop an indexed partition
        method: 1. create a partition
                2. insert some data
                3. create an index
                4. flush or not flush (removed flush step for issue #5837)
                5. drop the partition
        expected: drop successfully
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # create partition
        partition_name = cf.gen_unique_str(prefix)
        partition_w = self.init_partition_wrap(collection_w, partition_name)
        assert collection_w.has_partition(partition_name)[0]

        # insert data to partition
        ins_res, _ = partition_w.insert(data)
        assert len(ins_res.primary_keys) == len(data[0])

        # create index of collection
        collection_w.create_index(ct.default_float_vec_field_name, index_param)

        # # flush
        # if flush:
        #     self._connect().flush([collection_w.name])

        # drop partition
        partition_w.drop()
        assert not collection_w.has_partition(partition_name)[0]

    @pytest.mark.tags(CaseLabel.L1)
    def test_partition_release_empty_partition(self):
        """
        target: verify release an empty partition
        method: 1. create a partition
                2. release the partition
        expected: release successfully
        """
        # create partition
        partition_w = self.init_partition_wrap()
        assert partition_w.is_empty

        # release partition
        partition_w.release()
        # TODO: assert no more memory consumed

    @pytest.mark.tags(CaseLabel.L1)
    def test_partition_release_dropped_partition(self):
        """
        target: verify release a dropped partition
        method: 1. create a partition
                2. drop the partition
                3. release the partition
        expected: raise exception
        """
        # create partition
        partition_w = self.init_partition_wrap()

        # drop partition
        partition_w.drop()

        # release the dropped partition and check err response
        partition_w.release(check_task=CheckTasks.err_res,
                            check_items={
                                ct.err_code: 1,
                                ct.err_msg: PartitionErrorMessage.PartitionNotExist
                            })

    @pytest.mark.tags(CaseLabel.L1)
    # @pytest.mark.parametrize("partition_name", [cf.gen_unique_str(prefix)])
    def test_partition_release_dropped_collection(self):
        """
        target: verify release a partition of a dropped collection
        method: 1. create a collection and partition
                2. drop the collection
                3. release the partition
        expected: raise exception
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # create partition
        partition_name = cf.gen_unique_str(prefix)
        partition_w = self.init_partition_wrap(collection_w, partition_name)
        assert collection_w.has_partition(partition_name)[0]

        # drop collection
        collection_w.drop()

        # release the partition and check err response
        partition_w.release(check_task=CheckTasks.err_res,
                            check_items={
                                ct.err_code: 1,
                                ct.err_msg: "can't find collection"
                            })

    @pytest.mark.tags(CaseLabel.L1)
    # @pytest.mark.parametrize("partition_name, search_vectors",
    #                          [(cf.gen_unique_str(prefix), cf.gen_vectors(1, ct.default_dim))])
    def test_partition_release_after_collection_released(self):
        """
        target: verify release a partition after the collection released
        method: 1. create a collection and partition
                2. insert some data
                3. release the collection
                4. release the partition
        expected: partition released successfully
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # create partition
        partition_name = cf.gen_unique_str(prefix)
        partition_w = self.init_partition_wrap(collection_w, partition_name)
        assert collection_w.has_partition(partition_name)[0]

        # insert data to partition
        data = cf.gen_default_list_data()
        partition_w.insert(data)
        assert partition_w.num_entities == len(data[0])
        assert collection_w.num_entities == len(data[0])

        # load partition
        partition_w.load()

        # search of partition succeeds while loaded
        search_vectors = cf.gen_vectors(1, ct.default_dim)
        res_1, _ = partition_w.search(
            data=search_vectors,
            anns_field=ct.default_float_vec_field_name,
            params={"nprobe": 32}, limit=1)
        assert len(res_1) == 1

        # release collection
        collection_w.release()

        # search of partition fails after the collection is released
        # NOTE(review): CheckTasks is accessed via `ct.` here while other
        # tests use it directly — confirm `ct` re-exports CheckTasks
        res_2, _ = partition_w.search(
            data=search_vectors,
            anns_field=ct.default_float_vec_field_name,
            params={"nprobe": 32}, limit=1,
            check_task=ct.CheckTasks.err_res,
            check_items={
                ct.err_code: 0,
                ct.err_msg: "not loaded into memory"
            })
        # release partition
        partition_w.release()

    @pytest.mark.tags(CaseLabel.L1)
    # @pytest.mark.parametrize("partition_name, data", [(ct.default_partition_name, cf.gen_default_dataframe_data())])
    def test_partition_insert_default_partition(self):
        """
        target: verify insert data into _default partition
        method: 1. create a collection
                2. insert some data into _default partition
        expected: insert successfully
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # get the default partition
        partition_name = ct.default_partition_name
        assert collection_w.has_partition(partition_name)[0]
        partition_w = self.init_partition_wrap(collection_w, partition_name)

        # insert data to partition
        data = cf.gen_default_dataframe_data()
        partition_w.insert(data)
        # self._connect().flush([collection_w.name])
        assert partition_w.num_entities == len(data)

    @pytest.mark.tags(CaseLabel.L1)
    def test_partition_insert_dropped_partition(self):
        """
        target: verify insert data into dropped partition
        method: 1. create a partition
                2. drop the partition
                3. insert some data into the dropped partition
        expected: raise exception
        """
        # create partition
        partition_w = self.init_partition_wrap()

        # drop partition
        partition_w.drop()

        # insert data to partition
        partition_w.insert(cf.gen_default_dataframe_data(),
                           check_task=CheckTasks.err_res,
                           check_items={
                               ct.err_code: 1,
                               ct.err_msg: "Partition not exist"
                           })
        # TODO: update the assert error

    @pytest.mark.tags(CaseLabel.L1)
    # @pytest.mark.parametrize("partition_name", [cf.gen_unique_str(prefix)])
    def test_partition_insert_dropped_collection(self):
        """
        target: verify insert data into dropped collection
        method: 1. create a collection
                2. insert some data into dropped collection
        expected: raise exception
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # create partition
        partition_name = cf.gen_unique_str(prefix)
        partition_w = self.init_partition_wrap(collection_w, partition_name)
        assert collection_w.has_partition(partition_name)[0]

        # drop collection
        collection_w.drop()

        # insert data to partition
        partition_w.insert(cf.gen_default_dataframe_data(),
                           check_task=CheckTasks.err_res,
                           check_items={
                               ct.err_code: 1,
                               ct.err_msg: "None Type"
                           })

    @pytest.mark.tags(CaseLabel.L2)
    def test_partition_insert_maximum_size_data(self):
        """
        target: verify insert maximum size data(256M?) a time
        method: 1. create a partition
                2. insert maximum size data
        expected: insert successfully
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # create partition
        partition_w = self.init_partition_wrap(collection_w)

        # insert data to partition
        max_size = 100000  # TODO: clarify the max size of data
        ins_res, _ = partition_w.insert(
            cf.gen_default_dataframe_data(max_size), timeout=40)
        assert len(ins_res.primary_keys) == max_size
        # self._connect().flush([collection_w.name])
        assert partition_w.num_entities == max_size

    @pytest.mark.tags(CaseLabel.L1)
    @pytest.mark.parametrize("dim", [ct.default_dim - 1, ct.default_dim + 1])
    def test_partition_insert_mismatched_dimensions(self, dim):
        """
        target: verify insert data with mismatched vector dimensions
        method: 1. create a collection with default dim
                2. insert data whose dim does not match
        expected: raise exception
        """
        # create partition
        partition_w = self.init_partition_wrap()

        data = cf.gen_default_list_data(nb=10, dim=dim)
        # insert data to partition
        partition_w.insert(data,
                           check_task=CheckTasks.err_res,
                           check_items={
                               ct.err_code: 1,
                               ct.err_msg: "but entities field dim"
                           })

    @pytest.mark.tags(CaseLabel.L1)
    @pytest.mark.parametrize("sync", [True, False])
    def test_partition_insert_sync(self, sync):
        """
        target: verify insert sync
        method: 1. create a partition
                2. insert data in sync
        expected: insert successfully
        """
        # TODO: not implemented yet
        pass
    ]
    # NOTE(review): the lines above/below continue a fixture whose start is
    # outside this chunk; suite indent (4) is inferred — TODO confirm
    if log_config.log_worker != "":
        file_path_list.append(log_config.log_worker)
    # clean (or back up) existing log files before the session starts
    cf.modify_file(file_path_list=file_path_list, is_modify=clean_log)
    log.info("#" * 80)
    log.info("[initialize_milvus] Log cleaned up, start testing...")
    param_info.prepare_param_info(host, port, handler)


# fixture yielding each invalid string from ct.get_invalid_strs in turn
@pytest.fixture(params=ct.get_invalid_strs)
def get_invalid_string(request):
    yield request.param


# fixture yielding each simple index parameter set in turn
@pytest.fixture(params=cf.gen_simple_index())
def get_index_param(request):
    yield request.param


# fixture yielding invalid collection-name candidates
@pytest.fixture(params=ct.get_invalid_strs)
def get_invalid_collection_name(request):
    yield request.param


# fixture yielding invalid field-name candidates
@pytest.fixture(params=ct.get_invalid_strs)
def get_invalid_field_name(request):
    yield request.param


# NOTE(review): decorator of a fixture whose def is outside this chunk
@pytest.fixture(params=ct.get_invalid_strs)
class TestcaseBase(Base):
    """
    Additional methods;
    Public methods that can be used to add cases.
    """

    @pytest.fixture(scope="module", params=ct.get_invalid_strs)
    def get_invalid_string(self, request):
        yield request.param

    @pytest.fixture(scope="module", params=cf.gen_simple_index())
    def get_index_param(self, request):
        yield request.param

    def _connect(self):
        """ Add an connection and create the connect """
        self.connection_wrap.add_connection(default={
            "host": param_info.param_host,
            "port": param_info.param_port
        })
        res, is_succ = self.connection_wrap.connect(alias='default')
        if not is_succ:
            # surface the failed connect result so the test aborts early
            raise res
        log.info("_connect: Connected")
        return res

    def init_collection_wrap(self, name=None, schema=None, check_task=None,
                             **kwargs):
        """Create an ApiCollectionWrapper; generates a unique name/default
        schema when not given, and connects first if no connection exists."""
        name = cf.gen_unique_str('coll_') if name is None else name
        schema = cf.gen_default_collection_schema(
        ) if schema is None else schema
        if self.connection_wrap.get_connection(alias='default')[0] is None:
            self._connect()
        collection_w = ApiCollectionWrapper()
        collection_w.init_collection(name=name, schema=schema,
                                     check_task=check_task, **kwargs)
        return collection_w

    def init_partition_wrap(self, collection_wrap=None, name=None,
                            description=None, check_task=None,
                            check_items=None, **kwargs):
        """Create an ApiPartitionWrapper on the given (or a freshly created)
        collection; name/description default to unique generated strings."""
        name = cf.gen_unique_str("partition_") if name is None else name
        description = cf.gen_unique_str(
            "partition_des_") if description is None else description
        collection_wrap = self.init_collection_wrap(
        ) if collection_wrap is None else collection_wrap
        partition_wrap = ApiPartitionWrapper()
        partition_wrap.init_partition(collection_wrap.collection, name,
                                      description,
                                      check_task=check_task,
                                      check_items=check_items,
                                      **kwargs)
        return partition_wrap

    def init_collection_general(self, prefix, insert_data=False,
                                nb=ct.default_nb, partition_num=0,
                                is_binary=False):
        """
        target: create specified collections
        method: 1. create collections (binary/non-binary)
                2. create partitions if specified
                3. insert specified binary/non-binary data
                   into each partition if any
        expected: return collection and raw data
        """
        log.info("Test case of search interface: initialize before test case")
        conn = self._connect()
        collection_name = cf.gen_unique_str(prefix)
        vectors = []
        binary_raw_vectors = []
        # 1 create collection
        if is_binary:
            default_schema = cf.gen_default_binary_collection_schema()
        else:
            default_schema = cf.gen_default_collection_schema()
        log.info("init_collection_general: collection creation")
        collection_w = self.init_collection_wrap(name=collection_name,
                                                 schema=default_schema)
        # 2 add extra partitions if specified (default is 1 partition named "_default")
        if partition_num > 0:
            cf.gen_partitions(collection_w, partition_num)
        # 3 insert data if specified
        if insert_data:
            collection_w, vectors, binary_raw_vectors = cf.insert_data(
                collection_w, nb, is_binary)
            # flush only for moderate row counts; presumably larger inserts
            # are flushed elsewhere — TODO confirm against caller expectations
            if nb <= 32000:
                conn.flush([collection_w.name])
            assert collection_w.is_empty == False
            assert collection_w.num_entities == nb
            # NOTE(review): suite boundary reconstructed from collapsed
            # formatting — load() is assumed inside `if insert_data`; confirm
            collection_w.load()
        return collection_w, vectors, binary_raw_vectors