Example #1
 def test_calc_distance_not_support_metric(self, get_support_metric_field,
                                           get_not_support_metric):
     """
     target: test calculated distance with invalid metric
     method: input invalid metric
     expected: raise exception
     """
     self._connect()
     vectors_l = cf.gen_vectors(default_nb, default_dim)
     vectors_r = cf.gen_vectors(default_nb, default_dim)
     op_l = {"float_vectors": vectors_l}
     op_r = {"float_vectors": vectors_r}
     metric_field = get_support_metric_field
     metric = get_not_support_metric
     params = {metric_field: metric}
     self.utility_wrap.calc_distance(op_l,
                                     op_r,
                                     params,
                                     check_task=CheckTasks.err_res,
                                     check_items={
                                         "err_code":
                                         1,
                                         "err_msg":
                                         "{} metric type is invalid for "
                                         "float vector".format(metric)
                                     })
Example #2
 def do_search():
     # run a few searches and verify each call returns default_limit hits
     for _ in range(5):
         search_res, _ = collection_w.search(
             cf.gen_vectors(1, ct.default_dim),
             ct.default_float_vec_field_name, ct.default_search_params,
             ct.default_limit)
         assert len(search_res[0]) == ct.default_limit
Example #3
    def test_delete_insert_multi(self):
        """
        target: test delete after multi insert
        method: 1.create collection
                2.insert multiple times, no flush
                3.load
                4.delete entities with even primary keys
                5.search and query
        expected: deleted entities are absent from query and search results
        """
        # create collection and insert multiple times, tmp_nb entities each
        collection_w = self.init_collection_wrap(
            name=cf.gen_unique_str(prefix))
        multi = 3
        for i in range(multi):
            start = i * tmp_nb
            df = cf.gen_default_dataframe_data(tmp_nb, start=start)
            collection_w.insert(df)

        # delete even numbers
        ids = [i for i in range(0, tmp_nb * multi, 2)]
        expr = f'{ct.default_int64_field_name} in {ids}'
        collection_w.delete(expr)

        collection_w.load()
        collection_w.query(expr, check_task=CheckTasks.check_query_empty)
        search_res, _ = collection_w.search(
            cf.gen_vectors(ct.default_nq,
                           ct.default_dim), ct.default_float_vec_field_name,
            ct.default_search_params, ct.default_limit)
        for res_id in search_res[0].ids:
            assert res_id not in ids
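A note on the filter used above: delete and query take a plain Milvus boolean expression built by Python string formatting, so the ids list is interpolated directly into the string. A minimal sketch of what the expression expands to, assuming the default int64 primary key field is named "int64":

 ids = [0, 2, 4]
 expr = f'int64 in {ids}'
 print(expr)  # int64 in [0, 2, 4]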
Example #4
    def test_compact_merge_and_search(self):
        """
        target: test compact and search
        method: 1.insert data into two segments
                2.compact
                3.load and search
        expected: Verify search result
        """
        collection_w = self.collection_insert_multi_segments_one_shard(
            prefix, nb_of_segment=ct.default_nb, is_dup=False)

        # compact
        collection_w.compact()
        collection_w.wait_for_compaction_completed()
        collection_w.get_compaction_plans()

        # search
        collection_w.load()
        search_res, _ = collection_w.search(
            cf.gen_vectors(ct.default_nq,
                           ct.default_dim), ct.default_float_vec_field_name,
            ct.default_search_params, ct.default_limit)
        assert len(search_res) == ct.default_nq
        for hits in search_res:
            assert len(hits) == ct.default_limit
Example #5
    def test_memory_stress_replicas_group_load_balance(self, prepare_collection):
        """
        target: test apply memory stress on replicas and load balance inside group
        method: 1.Deploy milvus and limit querynode memory 6Gi
                2.Insert 1,000,000 entities (500Mb), load 2 replicas (memory usage 1.5Gb)
                3.Apply memory stress 4Gi on querynode
        expected: Verify that load balancing occurs
        """
        collection_w = prepare_collection
        utility_w = ApiUtilityWrapper()
        release_name = "mic-memory"

        # load and search
        collection_w.load(replica_number=2)
        progress, _ = utility_w.loading_progress(collection_w.name)
        assert progress["loading_progress"] == "100%"

        # get the replicas and pick one querynode for chaos
        replicas, _ = collection_w.get_replicas()
        chaos_querynode_id = replicas.groups[0].group_nodes[0]
        label = f"app.kubernetes.io/instance={release_name}, app.kubernetes.io/component=querynode"
        querynode_id_pod_pair = get_querynode_id_pod_pairs("chaos-testing", label)
        chaos_querynode_pod = querynode_id_pod_pair[chaos_querynode_id]

        # get the segment num before chaos
        seg_info_before, _ = utility_w.get_query_segment_info(collection_w.name)
        seg_distribution_before = cf.get_segment_distribution(seg_info_before)
        segments_num_before = len(seg_distribution_before[chaos_querynode_id]["sealed"])
        log.debug(segments_num_before)
        log.debug(seg_distribution_before[chaos_querynode_id]["sealed"])

        # apply memory stress
        chaos_config = gen_experiment_config("./chaos_objects/memory_stress/chaos_replicas_memory_stress_pods.yaml")
        chaos_config['spec']['selector']['pods']['chaos-testing'] = [chaos_querynode_pod]
        log.debug(chaos_config)
        chaos_res = CusResource(kind=chaos_config['kind'],
                                group=constants.CHAOS_GROUP,
                                version=constants.CHAOS_VERSION,
                                namespace=constants.CHAOS_NAMESPACE)
        chaos_res.create(chaos_config)
        log.debug(f"Apply memory stress on querynode {chaos_querynode_id}, pod {chaos_querynode_pod}")

        duration = chaos_config.get('spec').get('duration')
        duration = duration.replace('h', '*3600+').replace('m', '*60+').replace('s', '*1+') + '+0'
        sleep(eval(duration))

        chaos_res.delete(metadata_name=chaos_config.get('metadata', None).get('name', None))

        # Verify automatic load balance
        seg_info_after, _ = utility_w.get_query_segment_info(collection_w.name)
        seg_distribution_after = cf.get_segment_distribution(seg_info_after)
        segments_num_after = len(seg_distribution_after[chaos_querynode_id]["sealed"])
        log.debug(segments_num_after)
        log.debug(seg_distribution_after[chaos_querynode_id]["sealed"])

        assert segments_num_after < segments_num_before
        search_res, _ = collection_w.search(cf.gen_vectors(1, dim=self.dim),
                                            ct.default_float_vec_field_name, ct.default_search_params,
                                            ct.default_limit, timeout=120)
        assert 1 == len(search_res) and ct.default_limit == len(search_res[0])
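The duration handling above turns a chaos duration string such as '5m' into an arithmetic expression and evaluates it to get the number of seconds to sleep. A minimal standalone sketch of the same transformation (my illustration, assuming durations only combine h/m/s units):

 def duration_to_seconds(duration: str) -> int:
     # '1h30m' -> '1*3600+30*60++0' -> 5400; the trailing '+0' closes the dangling '+'
     expr = duration.replace('h', '*3600+').replace('m', '*60+').replace('s', '*1+') + '+0'
     return eval(expr)

 assert duration_to_seconds('5m') == 300
 assert duration_to_seconds('1h30m') == 5400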
Example #6
    def test_compact_search_after_delete_channel(self):
        """
        target: test search after compact, and queryNode get delete request from channel,
                rather than compacted delta log
        method: 1.insert, flush and load
                2.delete half
                3.compact
                4.search
        expected: no compaction plan is generated; the queryNode receives deletes from the channel rather than from a compacted delta log
        """
        collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix), shards_num=1)

        df = cf.gen_default_dataframe_data()
        insert_res, _ = collection_w.insert(df)
        assert collection_w.num_entities == ct.default_nb

        collection_w.load()

        expr = f'{ct.default_int64_field_name} in {insert_res.primary_keys[:ct.default_nb // 2]}'
        collection_w.delete(expr)

        collection_w.compact()
        c_plans = collection_w.get_compaction_plans()[0]
        assert len(c_plans.plans) == 0

        # search
        collection_w.load()
        search_res, _ = collection_w.search(cf.gen_vectors(ct.default_nq, ct.default_dim),
                                            ct.default_float_vec_field_name,
                                            ct.default_search_params, ct.default_limit,
                                            check_task=CheckTasks.check_search_results,
                                            check_items={"nq": ct.default_nq,
                                                         "ids": insert_res.primary_keys[ct.default_nb // 2:],
                                                         "limit": ct.default_limit}
                                            )
Example #7
 def do_search():
     # search repeatedly and verify each call returns default_limit ids
     while True:
         search_res, _ = collection_w.search(cf.gen_vectors(1, ct.default_dim),
                                             ct.default_float_vec_field_name,
                                             ct.default_search_params, ct.default_limit)
         log.debug(search_res[0].ids)
         assert len(search_res[0].ids) == ct.default_limit
Example #8
 def test_insert_dataframe_order_inconsistent_schema(self):
     """
     target: test insert with dataframe fields inconsistent with schema
     method: insert a dataframe whose field order is inconsistent with the schema
     expected: raise exception
     """
     c_name = cf.gen_unique_str(prefix)
     collection_w = self.init_collection_wrap(name=c_name)
     nb = 10
     int_values = pd.Series(data=[i for i in range(nb)])
     float_values = pd.Series(data=[float(i) for i in range(nb)],
                              dtype="float32")
     float_vec_values = cf.gen_vectors(nb, ct.default_dim)
     df = pd.DataFrame({
         ct.default_float_field_name: float_values,
         ct.default_float_vec_field_name: float_vec_values,
         ct.default_int64_field_name: int_values
     })
     error = {
         ct.err_code: 0,
         ct.err_msg: 'The types of schema and data do not match'
     }
     collection_w.insert(data=df,
                         check_task=CheckTasks.err_res,
                         check_items=error)
Example #9
    def test_compact_and_index(self):
        """
        target: test compact and create index
        method: 1.insert data into two segments
                2.compact
                3.create index
                4.load and search
        expected: Verify search result and index info
        """
        collection_w = self.collection_insert_multi_segments_one_shard(prefix, nb_of_segment=ct.default_nb,
                                                                       is_dup=False)

        # compact
        collection_w.compact()
        collection_w.wait_for_compaction_completed()
        collection_w.get_compaction_plans(check_task=CheckTasks.check_merge_compact)

        # create index
        collection_w.create_index(ct.default_float_vec_field_name, ct.default_index)
        log.debug(collection_w.index())

        # search
        collection_w.load()
        search_res, _ = collection_w.search(cf.gen_vectors(ct.default_nq, ct.default_dim),
                                            ct.default_float_vec_field_name,
                                            ct.default_search_params, ct.default_limit)
        assert len(search_res) == ct.default_nq
        for hits in search_res:
            assert len(hits) == ct.default_limit
Example #10
    def test_compact_delete_and_search(self):
        """
        target: test delete and compact segment, and search
        method: 1.create collection and insert
                2.delete part entities
                3.compact
                4.load and search
        expected: Verify search result
        """
        collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix), shards_num=1)
        df = cf.gen_default_dataframe_data()
        insert_res, _ = collection_w.insert(df)

        expr = f'{ct.default_int64_field_name} in {insert_res.primary_keys[:ct.default_nb // 2]}'
        collection_w.delete(expr)
        assert collection_w.num_entities == ct.default_nb

        sleep(ct.compact_retention_duration + 1)
        collection_w.compact()
        collection_w.wait_for_compaction_completed()
        collection_w.get_compaction_plans(check_task=CheckTasks.check_delete_compact)

        # search
        collection_w.load()
        search_res, _ = collection_w.search(cf.gen_vectors(ct.default_nq, ct.default_dim),
                                            ct.default_float_vec_field_name,
                                            ct.default_search_params, ct.default_limit,
                                            check_task=CheckTasks.check_search_results,
                                            check_items={"nq": ct.default_nq,
                                                         "ids": insert_res.primary_keys[ct.default_nb // 2:],
                                                         "limit": ct.default_limit}
                                            )
        collection_w.query("int64 in [0]", check_task=CheckTasks.check_query_empty)
Example #11
 def do_search():
     """ do search """
     search_res, is_succ = collection_w.search(cf.gen_vectors(1, ct.default_dim),
                                               ct.default_float_vec_field_name, ct.default_search_params,
                                               ct.default_limit, check_task=CheckTasks.check_nothing)
     assert len(search_res) == 1
     return search_res, is_succ
Example #12
 def keep_running(self):
     while True:
         search_vec = cf.gen_vectors(5, ct.default_dim)
         t0 = time.time()
         _, result = self.c_wrap.search(
             data=search_vec,
             anns_field=ct.default_float_vec_field_name,
             param={"nprobe": 32},
             limit=1,
             timeout=timeout,
             enable_traceback=enable_traceback,
             check_task=CheckTasks.check_nothing)
         t1 = time.time()
         if result:
             self.rsp_times.append(t1 - t0)
             self.average_time = (
                 (t1 - t0) + self.average_time * self._succ) / (self._succ +
                                                                1)
             self._succ += 1
             log.debug(
                 f"search success, time: {t1 - t0:.4f}, average_time: {self.average_time:.4f}"
             )
         else:
             self._fail += 1
         sleep(constants.WAIT_PER_OP / 10)
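The running average above is the standard incremental mean update, new_avg = (x + old_avg * n) / (n + 1), so the checker never has to re-sum rsp_times. A quick sanity check of the identity (my sketch, not part of the checker):

 samples = [0.12, 0.31, 0.07, 0.25]
 avg, n = 0.0, 0
 for x in samples:
     avg = (x + avg * n) / (n + 1)  # same update as keep_running above
     n += 1
 assert abs(avg - sum(samples) / len(samples)) < 1e-12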
Example #13
 def test_calc_distance_right_vector_and_collection_ids(
         self, metric_field, metric, sqrt):
     """
     target: test calculated distance from collection entities
     method: set right vectors as random vectors, left vectors from collection
     expected: distance calculated successfully
     """
     self._connect()
     nb = 10
     collection_w, vectors, _, insert_ids = self.init_collection_general(
         prefix, True, nb)
     middle = len(insert_ids) // 2
     vectors = vectors[0].loc[:, default_field_name]
     vectors_l = vectors[:middle]
     vectors_r = cf.gen_vectors(nb, default_dim)
     op_l = {
         "ids": insert_ids[:middle],
         "collection": collection_w.name,
         "field": default_field_name
     }
     op_r = {"float_vectors": vectors_r}
     params = {metric_field: metric, "sqrt": sqrt}
     self.utility_wrap.calc_distance(op_l,
                                     op_r,
                                     params,
                                     check_task=CheckTasks.check_distance,
                                     check_items={
                                         "vectors_l": vectors_l,
                                         "vectors_r": vectors_r,
                                         "metric": metric,
                                         "sqrt": sqrt
                                     })
Example #14
    def test_partition_release(self):
        """
        target: verify release partition
        method: 1. create a collection and several partitions
                2. insert data into each partition
                3. flush and load the partitions
                4. release partition1
                5. search on both partitions
        expected: 1. searching the released partition raises an exception
                  2. searching the other partition still succeeds
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # create two partitions
        partition_w1 = self.init_partition_wrap(collection_w)
        partition_w2 = self.init_partition_wrap(collection_w)

        # insert data into the two partitions
        partition_w1.insert(cf.gen_default_list_data())
        partition_w2.insert(cf.gen_default_list_data())

        # load two partitions
        partition_w1.load()
        partition_w2.load()

        # search two partitions
        search_vectors = cf.gen_vectors(1, ct.default_dim)
        res1, _ = partition_w1.search(
            data=search_vectors,
            anns_field=ct.default_float_vec_field_name,
            params={"nprobe": 32},
            limit=1)
        res2, _ = partition_w2.search(
            data=search_vectors,
            anns_field=ct.default_float_vec_field_name,
            params={"nprobe": 32},
            limit=1)
        assert len(res1) == 1 and len(res2) == 1

        # release the first partition
        partition_w1.release()

        # check result
        res1, _ = partition_w1.search(
            data=search_vectors,
            anns_field=ct.default_float_vec_field_name,
            params={"nprobe": 32},
            limit=1,
            check_task=ct.CheckTasks.err_res,
            check_items={
                ct.err_code: 1,
                ct.err_msg: "partitions have been released"
            })
        res2, _ = partition_w2.search(
            data=search_vectors,
            anns_field=ct.default_float_vec_field_name,
            params={"nprobe": 32},
            limit=1)
        assert len(res2) == 1
Example #15
 def test_calc_distance_right_vectors_and_partition_ids(
         self, metric_field, metric, sqrt):
     """
     target: test calculated distance between vectors and partition entities
     method: set right vectors as random vectors, left vectors are entities
     expected: distance calculated successfully
     """
     self._connect()
     nb = 10
     collection_w, vectors, _, insert_ids = self.init_collection_general(
         prefix, True, nb, partition_num=1)
     middle = len(insert_ids) // 2
     partitions = collection_w.partitions
     vectors_r = cf.gen_vectors(nb // 2, default_dim)
     op_r = {"float_vectors": vectors_r}
     params = {metric_field: metric, "sqrt": sqrt}
     for i in range(len(partitions)):
         vectors_l = vectors[i].loc[:, default_field_name]
         op_l = {
             "ids": insert_ids[middle:],
             "collection": collection_w.name,
             "partition": partitions[i].name,
             "field": default_field_name
         }
         self.utility_wrap.calc_distance(
             op_l,
             op_r,
             params,
             check_task=CheckTasks.check_distance,
             check_items={
                 "vectors_l": vectors_l,
                 "vectors_r": vectors_r,
                 "metric": metric,
                 "sqrt": sqrt
             })
Example #16
 def test_calc_distance_default(self):
     """
     target: test calculated distance with default params
     method: calculated distance between two random vectors
     expected: distance calculated successfully
     """
     self._connect()
     vectors_l = cf.gen_vectors(default_nb, default_dim)
     vectors_r = cf.gen_vectors(default_nb, default_dim)
     op_l = {"float_vectors": vectors_l}
     op_r = {"float_vectors": vectors_r}
     self.utility_wrap.calc_distance(op_l,
                                     op_r,
                                     check_task=CheckTasks.check_distance,
                                     check_items={
                                         "vectors_l": vectors_l,
                                         "vectors_r": vectors_r
                                     })
Example #17
    def test_chaos_memory_stress_replicas_OOM(self, prepare_collection, mode):
        """
        target: test apply memory stress during loading, and querynode OOMKilled
        method: 1.Deploy milvus and limit querynode memory to 6Gi
                2.Create collection and insert 1,000,000 entities
                3.Apply memory stress so the querynode is OOMKilled while loading replicas
        expected: Verify the Milvus instance can still load and search after the querynode restarts
        """
        collection_w = prepare_collection
        utility_w = ApiUtilityWrapper()

        chaos_config = gen_experiment_config("./chaos_objects/memory_stress/chaos_querynode_memory_stress.yaml")
        chaos_config['spec']['mode'] = mode
        chaos_config['spec']['duration'] = '3m'
        chaos_config['spec']['stressors']['memory']['size'] = '6Gi'
        log.debug(chaos_config)
        chaos_res = CusResource(kind=chaos_config['kind'],
                                group=constants.CHAOS_GROUP,
                                version=constants.CHAOS_VERSION,
                                namespace=constants.CHAOS_NAMESPACE)

        chaos_res.create(chaos_config)
        log.debug("chaos injected")
        collection_w.load(replica_number=2, timeout=60, _async=True)

        utility_w.wait_for_loading_complete(collection_w.name)
        progress, _ = utility_w.loading_progress(collection_w.name)
        assert progress["loading_progress"] == '100%'

        sleep(180)
        chaos_res.delete(metadata_name=chaos_config.get('metadata', None).get('name', None))

        # TODO search failed
        search_res, _ = collection_w.search(cf.gen_vectors(1, dim=self.dim),
                                            ct.default_float_vec_field_name, ct.default_search_params,
                                            ct.default_limit, timeout=120)
        assert 1 == len(search_res) and ct.default_limit == len(search_res[0])

        collection_w.release()
        collection_w.load(replica_number=2)
        search_res, _ = collection_w.search(cf.gen_vectors(1, dim=self.dim),
                                            ct.default_float_vec_field_name, ct.default_search_params,
                                            ct.default_limit, timeout=120)
        assert 1 == len(search_res) and ct.default_limit == len(search_res[0])
Example #18
    def test_chaos_memory_stress_querynode(self, connection, chaos_yaml):
        """
        target: explore query node behavior after memory stress chaos is injected and recovered
        method: 1. Create a collection, insert some data
                2. Inject memory stress chaos
                3. Start a thread to load, search and query
                4. After chaos duration, check query search success rate
                5. Delete the chaos object, or let it finish, in the finally block
        expected: 1.If memory is insufficient, querynode is OOMKilled and available after restart
                  2.If memory is sufficient, the success rate of both query and search is 1.0
        """
        c_name = 'chaos_memory_nx6DNW4q'
        collection_w = ApiCollectionWrapper()
        collection_w.init_collection(c_name)
        log.debug(collection_w.schema)
        log.debug(collection_w._shards_num)

        # apply memory stress chaos
        chaos_config = gen_experiment_config(chaos_yaml)
        log.debug(chaos_config)
        chaos_res = CusResource(kind=chaos_config['kind'],
                                group=constants.CHAOS_GROUP,
                                version=constants.CHAOS_VERSION,
                                namespace=constants.CHAOS_NAMESPACE)
        chaos_res.create(chaos_config)
        log.debug("chaos injected")
        duration = chaos_config.get('spec').get('duration')
        duration = duration.replace('h', '*3600+').replace(
            'm', '*60+').replace('s', '*1+') + '+0'
        meta_name = chaos_config.get('metadata').get('name')
        # wait for the memory stress to take effect
        sleep(constants.WAIT_PER_OP * 2)

        # try release, load, query and search in a loop for the chaos duration
        try:
            start = time.time()
            while time.time() - start < eval(duration):
                collection_w.release()
                collection_w.load()

                term_expr = f'{ct.default_int64_field_name} in {[random.randint(0, 100)]}'
                query_res, _ = collection_w.query(term_expr)
                assert len(query_res) == 1

                search_res, _ = collection_w.search(
                    cf.gen_vectors(1, ct.default_dim),
                    ct.default_float_vec_field_name, ct.default_search_params,
                    ct.default_limit)
                log.debug(search_res[0].ids)
                assert len(search_res[0].ids) == ct.default_limit

        except Exception as e:
            raise Exception(str(e))

        finally:
            chaos_res.delete(meta_name)
Example #19
File: checker.py Project: avmi/milvus
 def search(self):
     res, result = self.c_wrap.search(
         data=cf.gen_vectors(5, ct.default_dim),
         anns_field=ct.default_float_vec_field_name,
         param={"nprobe": 32},
         limit=1,
         timeout=timeout,
         check_task=CheckTasks.check_nothing
     )
     return res, result
Example #20
    def test_milvus_default(self):
        from utils.util_log import test_log as log
        # create
        name = cf.gen_unique_str(prefix)
        t0 = datetime.datetime.now()
        collection_w = self.init_collection_wrap(name=name)
        tt = datetime.datetime.now() - t0
        log.debug(f"assert create: {tt}")
        assert collection_w.name == name

        # insert
        data = cf.gen_default_list_data()
        t0 = datetime.datetime.now()
        _, res = collection_w.insert(data)
        tt = datetime.datetime.now() - t0
        log.debug(f"assert insert: {tt}")
        assert res

        # flush
        t0 = datetime.datetime.now()
        assert collection_w.num_entities == len(data[0])
        tt = datetime.datetime.now() - t0
        log.debug(f"assert flush: {tt}")

        # search
        collection_w.load()
        search_vectors = cf.gen_vectors(1, ct.default_dim)
        t0 = datetime.datetime.now()
        res_1, _ = collection_w.search(data=search_vectors,
                                       anns_field=ct.default_float_vec_field_name,
                                       param={"nprobe": 16}, limit=1)
        tt = datetime.datetime.now() - t0
        log.debug(f"assert search: {tt}")
        assert len(res_1) == 1
        # collection_w.release()

        # index
        collection_w.insert(cf.gen_default_dataframe_data(nb=5000))
        assert collection_w.num_entities == len(data[0]) + 5000
        _index_params = {"index_type": "IVF_SQ8", "metric_type": "L2", "params": {"nlist": 64}}
        t0 = datetime.datetime.now()
        index, _ = collection_w.create_index(field_name=ct.default_float_vec_field_name,
                                             index_params=_index_params,
                                             name=cf.gen_unique_str())
        tt = datetime.datetime.now() - t0
        log.debug(f"assert index: {tt}")
        assert len(collection_w.indexes) == 1

        # query
        term_expr = f'{ct.default_int64_field_name} in [3001,4001,4999,2999]'
        t0 = datetime.datetime.now()
        res, _ = collection_w.query(term_expr)
        tt = datetime.datetime.now() - t0
        log.debug(f"assert query: {tt}")
        assert len(res) == 4
Example #21
 def keep_running(self):
     while self._running is True:
         search_vec = cf.gen_vectors(5, ct.default_dim)
         _, result = self.c_wrapper.search(data=search_vec,
                                           params={"nprobe": 32},
                                           limit=1,
                                           check_task="nothing")
         if result is True:
             self._succ += 1
         else:
             self._fail += 1
Example #22
    def test_memory_stress_replicas_cross_group_load_balance(self, prepare_collection):
        """
        target: apply memory stress on one replica group and verify no load balancing across groups
        method: 1.Limit all querynodes memory 6Gi
                2.Create and insert 1,000,000 entities
                3.Load collection with two replicas
                4.Apply 80% memory stress on one group
        expected: Verify that load balancing across groups is not occurring
        """
        collection_w = prepare_collection
        utility_w = ApiUtilityWrapper()
        release_name = "mic-memory"

        # load and search
        collection_w.load(replica_number=2)
        progress, _ = utility_w.loading_progress(collection_w.name)
        assert progress["loading_progress"] == "100%"
        seg_info_before, _ = utility_w.get_query_segment_info(collection_w.name)

        # get the replicas and the querynode pods of one group
        replicas, _ = collection_w.get_replicas()
        group_nodes = list(replicas.groups[0].group_nodes)
        label = f"app.kubernetes.io/instance={release_name}, app.kubernetes.io/component=querynode"
        querynode_id_pod_pair = get_querynode_id_pod_pairs("chaos-testing", label)
        group_nodes_pod = [querynode_id_pod_pair[node_id] for node_id in group_nodes]

        # apply memory stress
        chaos_config = gen_experiment_config("./chaos_objects/memory_stress/chaos_replicas_memory_stress_pods.yaml")
        chaos_config['spec']['selector']['pods']['chaos-testing'] = group_nodes_pod
        log.debug(chaos_config)
        chaos_res = CusResource(kind=chaos_config['kind'],
                                group=constants.CHAOS_GROUP,
                                version=constants.CHAOS_VERSION,
                                namespace=constants.CHAOS_NAMESPACE)
        chaos_res.create(chaos_config)
        log.debug(f"Apply memory stress on querynode {group_nodes}, pod {group_nodes_pod}")

        duration = chaos_config.get('spec').get('duration')
        duration = duration.replace('h', '*3600+').replace('m', '*60+').replace('s', '*1+') + '+0'
        sleep(eval(duration))

        chaos_res.delete(metadata_name=chaos_config.get('metadata', None).get('name', None))

        # Verify that no load balancing occurred across groups
        seg_info_after, _ = utility_w.get_query_segment_info(collection_w.name)
        seg_distribution_before = cf.get_segment_distribution(seg_info_before)
        seg_distribution_after = cf.get_segment_distribution(seg_info_after)
        for node_id in group_nodes:
            assert len(seg_distribution_before[node_id]) == len(seg_distribution_after[node_id])

        search_res, _ = collection_w.search(cf.gen_vectors(1, dim=self.dim),
                                            ct.default_float_vec_field_name, ct.default_search_params,
                                            ct.default_limit, timeout=120)
        assert 1 == len(search_res) and ct.default_limit == len(search_res[0])
Example #23
 def test_calc_distance_default_metric(self, sqrt):
     """
     target: test calculated distance with default param
     method: calculated distance with default metric
     expected: distance calculated successfully
     """
     self._connect()
     vectors_l = cf.gen_vectors(default_nb, default_dim)
     vectors_r = cf.gen_vectors(default_nb, default_dim)
     op_l = {"float_vectors": vectors_l}
     op_r = {"float_vectors": vectors_r}
     params = {"sqrt": sqrt}
     self.utility_wrap.calc_distance(op_l,
                                     op_r,
                                     params,
                                     check_task=CheckTasks.check_distance,
                                     check_items={
                                         "vectors_l": vectors_l,
                                         "vectors_r": vectors_r,
                                         "sqrt": sqrt
                                     })
Example #24
 def test_collection_construct_no_column_dataframe(self):
     """
     target: test constructing a collection from a dataframe with an empty column name
     method: pass a dataframe whose only column name is blank
     expected: raise exception
     """
     self._connect()
     c_name = cf.gen_unique_str(prefix)
     df = pd.DataFrame({' ': cf.gen_vectors(3, 2)})
     error = {ct.err_code: 0, ct.err_msg: "Field name should not be empty"}
     self.collection_wrap.init_collection(name=c_name, schema=None, data=df,
                                          check_task=CheckTasks.err_res, check_items=error)
Example #25
    def test_partition_release_after_collection_released(self):
        """
        target: verify release a partition after the collection released
        method: 1. create a collection and a partition
                2. insert some data
                3. release the collection
                4. release the partition
        expected: partition released successfully
        """
        # create collection
        collection_w = self.init_collection_wrap()

        # create partition
        partition_name = cf.gen_unique_str(prefix)
        partition_w = self.init_partition_wrap(collection_w, partition_name)
        assert collection_w.has_partition(partition_name)[0]

        # insert data into the partition
        data = cf.gen_default_list_data()
        partition_w.insert(data)
        assert partition_w.num_entities == len(data[0])
        assert collection_w.num_entities == len(data[0])

        # load partition
        partition_w.load()

        # search the partition
        search_vectors = cf.gen_vectors(1, ct.default_dim)
        res_1, _ = partition_w.search(
            data=search_vectors,
            anns_field=ct.default_float_vec_field_name,
            params={"nprobe": 32},
            limit=1)
        assert len(res_1) == 1

        # release collection
        collection_w.release()

        # search the partition after the collection release
        res_2, _ = partition_w.search(
            data=search_vectors,
            anns_field=ct.default_float_vec_field_name,
            params={"nprobe": 32},
            limit=1,
            check_task=ct.CheckTasks.err_res,
            check_items={
                ct.err_code: 0,
                ct.err_msg: "not loaded into memory"
            })
        # release partition
        partition_w.release()
Example #26
 def test_calc_distance_not_match_dim(self):
     """
     target: test calculated distance with vectors of mismatched dimensions
     method: input two sets of vectors with different dimensions
     expected: raise exception
     """
     self._connect()
     dim = 129
     vector_l = cf.gen_vectors(default_nb, default_dim)
     vector_r = cf.gen_vectors(default_nb, dim)
     op_l = {"float_vectors": vector_l}
     op_r = {"float_vectors": vector_r}
     self.utility_wrap.calc_distance(
         op_l,
         op_r,
         check_task=CheckTasks.err_res,
         check_items={
             "err_code":
             1,
             "err_msg":
             "Cannot calculate distance between "
             "vectors with different dimension"
         })
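The error here is the expected guard: a pairwise distance is only defined for vectors of equal dimension. A quick numpy illustration of the same mismatch (my sketch; numpy is an assumption, not used by the test itself):

 import numpy as np
 a = np.random.rand(128)  # default_dim
 b = np.random.rand(129)  # mismatched dimension
 try:
     np.linalg.norm(a - b)
 except ValueError as e:
     print(e)  # operands could not be broadcast together ...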
Example #27
 def keep_running(self):
     while self._running is True:
         search_vec = cf.gen_vectors(5, ct.default_dim)
         _, result = self.c_wrap.search(
                             data=search_vec,
                             anns_field=ct.default_float_vec_field_name,
                             param={"nprobe": 32},
                             limit=1, check_task='check_nothing'
                         )
         if result:
             self._succ += 1
         else:
             self._fail += 1
         sleep(constants.WAIT_PER_OP / 10)
Example #28
 def test_calc_distance_invalid_using(self, get_support_metric_field):
     """
     target: test calculated distance with invalid using
     method: input invalid using
     expected: raise exception
     """
     self._connect()
     vectors_l = cf.gen_vectors(default_nb, default_dim)
     vectors_r = cf.gen_vectors(default_nb, default_dim)
     op_l = {"float_vectors": vectors_l}
     op_r = {"float_vectors": vectors_r}
     metric_field = get_support_metric_field
     params = {metric_field: "L2", "sqrt": True}
     using = "empty"
     self.utility_wrap.calc_distance(op_l,
                                     op_r,
                                     params,
                                     using=using,
                                     check_task=CheckTasks.err_res,
                                     check_items={
                                         "err_code": 1,
                                         "err_msg": "should create connect"
                                     })
Example #29
 def test_insert_list_order_inconsistent_schema(self):
     """
     target: test insert data fields order inconsistent with schema
     method: insert list data, data fields order inconsistent with schema
     expected: raise exception
     """
     c_name = cf.gen_unique_str(prefix)
     collection_w = self.init_collection_wrap(name=c_name)
     nb = 10
     int_values = [i for i in range(nb)]
     float_values = [np.float32(i) for i in range(nb)]
     float_vec_values = cf.gen_vectors(nb, ct.default_dim)
     data = [float_values, int_values, float_vec_values]
     error = {ct.err_code: 0, ct.err_msg: 'The types of schema and data do not match'}
     collection_w.insert(data=data, check_task=CheckTasks.err_res, check_items=error)
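For contrast, the field order the default schema accepts is int64, float, then float_vector, which is exactly the order the next example passes. A minimal sketch of the accepted layout, reusing the helpers above:

 data = [int_values, float_values, float_vec_values]  # matches the schema field order
 collection_w.insert(data=data)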
Example #30
 def test_insert_vector_value_less(self):
     """
     target: test insert with the vector field having fewer values than other fields
     method: the vector field has one fewer value than the int field
     expected: raise exception
     """
     c_name = cf.gen_unique_str(prefix)
     collection_w = self.init_collection_wrap(name=c_name)
     nb = 10
     int_values = [i for i in range(nb)]
     float_values = [np.float32(i) for i in range(nb)]
     float_vec_values = cf.gen_vectors(nb - 1, ct.default_dim)
     data = [int_values, float_values, float_vec_values]
     error = {ct.err_code: 0, ct.err_msg: 'Arrays must all be same length.'}
     collection_w.insert(data=data, check_task=CheckTasks.err_res, check_items=error)